use parking_lot::RwLock;
use std::collections::HashMap;
use std::fmt;
use std::io::Write;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::Arc;
use std::time::{Instant, SystemTime, UNIX_EPOCH};
use crate::observability::{SpanId, TraceId};
/// Severity of a log record, ordered from most verbose (`Trace`) to most
/// severe (`Fatal`); the explicit discriminants fix the ordering.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub enum LogLevel {
    Trace = 0,
    Debug = 1,
    #[default]
    Info = 2,
    Warn = 3,
    Error = 4,
    Fatal = 5,
}

impl LogLevel {
    /// Upper-case name of the level, e.g. `"INFO"`.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Trace => "TRACE",
            Self::Debug => "DEBUG",
            Self::Info => "INFO",
            Self::Warn => "WARN",
            Self::Error => "ERROR",
            Self::Fatal => "FATAL",
        }
    }

    /// Case-insensitive parse; also accepts the aliases "WARNING" and
    /// "CRITICAL". Returns `None` for anything unrecognized.
    pub fn parse(s: &str) -> Option<Self> {
        let upper = s.to_uppercase();
        let level = match upper.as_str() {
            "TRACE" => Self::Trace,
            "DEBUG" => Self::Debug,
            "INFO" => Self::Info,
            "WARN" | "WARNING" => Self::Warn,
            "ERROR" => Self::Error,
            "FATAL" | "CRITICAL" => Self::Fatal,
            _ => return None,
        };
        Some(level)
    }
}

impl fmt::Display for LogLevel {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
/// Output format for rendered log records.
///
/// NOTE(review): `StructuredLogger::log` currently renders `Text`,
/// `Compact`, and `Pretty` identically via `LogEntry::to_text`; only
/// `Json` selects a different renderer.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum LogOutput {
    /// Plain human-readable line (the default).
    #[default]
    Text,
    /// One JSON object per record.
    Json,
    /// Compact text variant (same renderer as `Text` today).
    Compact,
    /// Pretty text variant (same renderer as `Text` today).
    Pretty,
}
/// Correlation data attached to log records: optional trace/span ids
/// plus free-form string fields copied onto every entry.
#[derive(Debug, Clone)]
pub struct TraceContext {
    pub trace_id: Option<TraceId>,
    pub span_id: Option<SpanId>,
    pub parent_span_id: Option<SpanId>,
    pub fields: HashMap<String, String>,
}

impl TraceContext {
    /// Empty context: no ids, no fields.
    pub fn new() -> Self {
        Self {
            trace_id: None,
            span_id: None,
            parent_span_id: None,
            fields: HashMap::new(),
        }
    }

    /// Context with freshly generated trace and span ids and no parent.
    pub fn with_new_trace() -> Self {
        Self {
            trace_id: Some(TraceId::new()),
            span_id: Some(SpanId::new()),
            ..Self::new()
        }
    }

    /// Replace the trace id (builder style).
    pub fn with_trace_id(mut self, trace_id: TraceId) -> Self {
        self.trace_id = Some(trace_id);
        self
    }

    /// Replace the span id (builder style).
    pub fn with_span_id(mut self, span_id: SpanId) -> Self {
        self.span_id = Some(span_id);
        self
    }

    /// Add (or overwrite) one key/value field (builder style).
    pub fn with_field(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
        self.fields.insert(key.into(), value.into());
        self
    }
}

impl Default for TraceContext {
    /// Same as [`TraceContext::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// Runtime configuration for [the structured logger].
#[derive(Debug, Clone)]
pub struct LogConfig {
    /// Global minimum level; records below it are dropped.
    pub level: LogLevel,
    /// How records are rendered (text vs JSON).
    pub output: LogOutput,
    /// When true, trace/span ids are included in the output.
    pub trace_correlation: bool,
    /// When true, a timestamp is prepended to every record.
    pub include_timestamps: bool,
    /// When true, file/line are emitted (JSON renderer only).
    pub include_location: bool,
    /// When true, thread id/name are emitted (JSON renderer only).
    pub include_thread_id: bool,
    /// Per-module level overrides; see `LogConfig::effective_level`.
    pub module_levels: HashMap<String, LogLevel>,
    /// Service name emitted in JSON output.
    pub service_name: String,
    /// Environment name (e.g. "development", "production") emitted in JSON.
    pub environment: String,
    /// Extra key/value pairs added to every JSON record.
    pub global_fields: HashMap<String, String>,
}
impl Default for LogConfig {
    /// Baseline configuration: INFO-level text logs for the "ringkernel"
    /// service in the "development" environment, with timestamps and
    /// trace correlation enabled and location/thread info disabled.
    fn default() -> Self {
        Self {
            level: LogLevel::Info,
            output: LogOutput::Text,
            service_name: String::from("ringkernel"),
            environment: String::from("development"),
            trace_correlation: true,
            include_timestamps: true,
            include_location: false,
            include_thread_id: false,
            module_levels: HashMap::new(),
            global_fields: HashMap::new(),
        }
    }
}
impl LogConfig {
    /// Start building a config fluently.
    pub fn builder() -> LogConfigBuilder {
        LogConfigBuilder::new()
    }

    /// Preset: verbose pretty-printed logs with source locations.
    pub fn development() -> Self {
        Self {
            level: LogLevel::Debug,
            output: LogOutput::Pretty,
            trace_correlation: true,
            include_timestamps: true,
            include_location: true,
            include_thread_id: false,
            environment: "development".to_string(),
            ..Default::default()
        }
    }

    /// Preset: JSON logs with thread ids, suitable for log aggregation.
    pub fn production() -> Self {
        Self {
            level: LogLevel::Info,
            output: LogOutput::Json,
            trace_correlation: true,
            include_timestamps: true,
            include_location: false,
            include_thread_id: true,
            environment: "production".to_string(),
            ..Default::default()
        }
    }

    /// Resolve the level that applies to `module`: an exact entry in
    /// `module_levels` wins, then the longest matching path prefix, and
    /// finally the global `level`.
    ///
    /// BUG FIX: prefix matching now requires a `::` path boundary, so a
    /// rule for "ringkernel" applies to "ringkernel::core" but no longer
    /// to an unrelated module such as "ringkernelx::core".
    pub fn effective_level(&self, module: &str) -> LogLevel {
        if let Some(&level) = self.module_levels.get(module) {
            return level;
        }
        self.module_levels
            .iter()
            .filter(|(prefix, _)| {
                module
                    .strip_prefix(prefix.as_str())
                    .map_or(false, |rest| rest.starts_with("::"))
            })
            // Longest configured prefix is the most specific rule.
            .max_by_key(|(prefix, _)| prefix.len())
            .map(|(_, &level)| level)
            .unwrap_or(self.level)
    }
}
/// Fluent builder for [`LogConfig`]; starts from the default config.
#[derive(Debug, Default)]
pub struct LogConfigBuilder {
    config: LogConfig,
}

impl LogConfigBuilder {
    /// Begin with `LogConfig::default()`.
    pub fn new() -> Self {
        Self {
            config: LogConfig::default(),
        }
    }

    /// Apply `f` to the config under construction, then hand the builder back.
    fn update(mut self, f: impl FnOnce(&mut LogConfig)) -> Self {
        f(&mut self.config);
        self
    }

    /// Set the global minimum level.
    pub fn level(self, level: LogLevel) -> Self {
        self.update(|c| c.level = level)
    }

    /// Set the output format.
    pub fn output(self, output: LogOutput) -> Self {
        self.update(|c| c.output = output)
    }

    /// Enable or disable trace/span id emission.
    pub fn with_trace_correlation(self, enabled: bool) -> Self {
        self.update(|c| c.trace_correlation = enabled)
    }

    /// Enable or disable timestamps.
    pub fn with_timestamps(self, enabled: bool) -> Self {
        self.update(|c| c.include_timestamps = enabled)
    }

    /// Enable or disable file/line emission.
    pub fn with_location(self, enabled: bool) -> Self {
        self.update(|c| c.include_location = enabled)
    }

    /// Enable or disable thread id/name emission.
    pub fn with_thread_id(self, enabled: bool) -> Self {
        self.update(|c| c.include_thread_id = enabled)
    }

    /// Set the service name reported in JSON output.
    pub fn service_name(self, name: impl Into<String>) -> Self {
        let name = name.into();
        self.update(|c| c.service_name = name)
    }

    /// Set the environment name reported in JSON output.
    pub fn environment(self, env: impl Into<String>) -> Self {
        let env = env.into();
        self.update(|c| c.environment = env)
    }

    /// Add a per-module level override.
    pub fn module_level(self, module: impl Into<String>, level: LogLevel) -> Self {
        let module = module.into();
        self.update(|c| {
            c.module_levels.insert(module, level);
        })
    }

    /// Add a key/value pair attached to every JSON record.
    pub fn global_field(self, key: impl Into<String>, value: impl Into<String>) -> Self {
        let (key, value) = (key.into(), value.into());
        self.update(|c| {
            c.global_fields.insert(key, value);
        })
    }

    /// Finish and return the configuration.
    pub fn build(self) -> LogConfig {
        self.config
    }
}
/// A single structured log record.
#[derive(Debug, Clone)]
pub struct LogEntry {
    pub level: LogLevel,
    pub message: String,
    /// Wall-clock capture time (set in `LogEntry::new`).
    pub timestamp: SystemTime,
    /// Logical origin, usually a module path; rendered as `[target]` in text.
    pub target: Option<String>,
    /// Source location; only emitted when `include_location` is set.
    pub file: Option<String>,
    pub line: Option<u32>,
    /// Thread info; only emitted by the JSON renderer when enabled.
    pub thread_id: Option<u64>,
    pub thread_name: Option<String>,
    /// Correlation ids, typically copied from a `TraceContext`.
    pub trace_id: Option<TraceId>,
    pub span_id: Option<SpanId>,
    /// Structured key/value payload.
    pub fields: HashMap<String, LogValue>,
}
/// Dynamically typed value attached to a log entry's fields.
#[derive(Debug, Clone)]
pub enum LogValue {
    String(String),
    Int(i64),
    Uint(u64),
    Float(f64),
    Bool(bool),
}

impl fmt::Display for LogValue {
    /// Renders the inner value exactly as its own `Display` would.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::String(s) => f.write_str(s),
            Self::Int(i) => write!(f, "{i}"),
            Self::Uint(u) => write!(f, "{u}"),
            Self::Float(x) => write!(f, "{x}"),
            Self::Bool(b) => write!(f, "{b}"),
        }
    }
}

impl From<&str> for LogValue {
    fn from(value: &str) -> Self {
        Self::String(value.to_owned())
    }
}

impl From<String> for LogValue {
    fn from(value: String) -> Self {
        Self::String(value)
    }
}

impl From<i64> for LogValue {
    fn from(value: i64) -> Self {
        Self::Int(value)
    }
}

impl From<u64> for LogValue {
    fn from(value: u64) -> Self {
        Self::Uint(value)
    }
}

impl From<f64> for LogValue {
    fn from(value: f64) -> Self {
        Self::Float(value)
    }
}

impl From<bool> for LogValue {
    fn from(value: bool) -> Self {
        Self::Bool(value)
    }
}
impl LogEntry {
    /// Create an entry at `level` with `message`, stamped with the current
    /// wall-clock time; all other metadata starts unset.
    pub fn new(level: LogLevel, message: impl Into<String>) -> Self {
        Self {
            level,
            message: message.into(),
            timestamp: SystemTime::now(),
            target: None,
            file: None,
            line: None,
            thread_id: None,
            thread_name: None,
            trace_id: None,
            span_id: None,
            fields: HashMap::new(),
        }
    }

    /// Set the log target (usually a module path).
    pub fn with_target(mut self, target: impl Into<String>) -> Self {
        self.target = Some(target.into());
        self
    }

    /// Copy trace/span ids and all context fields from `ctx` onto this entry.
    pub fn with_trace_context(mut self, ctx: &TraceContext) -> Self {
        self.trace_id = ctx.trace_id;
        self.span_id = ctx.span_id;
        for (k, v) in &ctx.fields {
            self.fields.insert(k.clone(), LogValue::String(v.clone()));
        }
        self
    }

    /// Attach one structured field to this entry.
    pub fn with_field(mut self, key: impl Into<String>, value: impl Into<LogValue>) -> Self {
        self.fields.insert(key.into(), value.into());
        self
    }

    /// Escape `\` and `"` so `s` can be embedded in a JSON string literal.
    /// NOTE(review): control characters (e.g. newlines) are left as-is,
    /// matching the original behavior; strict RFC 8259 parsers may reject
    /// such output — confirm whether full escaping is needed.
    fn escape_json(s: &str) -> String {
        s.replace('\\', "\\\\").replace('"', "\\\"")
    }

    /// Convert days since 1970-01-01 into (year, month, day) in the
    /// proleptic Gregorian calendar (Howard Hinnant's civil_from_days).
    fn civil_from_days(days: i64) -> (i64, u32, u32) {
        // Shift epoch to 0000-03-01 so leap days fall at the end of a year.
        let z = days + 719_468;
        let era = z.div_euclid(146_097);
        let doe = z.rem_euclid(146_097); // day of era [0, 146096]
        let yoe = (doe - doe / 1_460 + doe / 36_524 - doe / 146_096) / 365;
        let doy = doe - (365 * yoe + yoe / 4 - yoe / 100); // March-based day of year
        let mp = (5 * doy + 2) / 153; // 0 = March
        let day = (doy - (153 * mp + 2) / 5 + 1) as u32;
        let month = (if mp < 10 { mp + 3 } else { mp - 9 }) as u32;
        let year = era * 400 + yoe + i64::from(month <= 2);
        (year, month, day)
    }

    /// Serialize the entry as a single-line JSON object according to `config`.
    ///
    /// NOTE(review): floats serialize via `Display`, so non-finite values
    /// (NaN/inf) would produce non-conforming JSON — confirm inputs.
    pub fn to_json(&self, config: &LogConfig) -> String {
        let mut json = String::with_capacity(512);
        json.push('{');
        if config.include_timestamps {
            let ts = self
                .timestamp
                .duration_since(UNIX_EPOCH)
                .map(|d| d.as_millis())
                .unwrap_or(0);
            json.push_str(&format!(r#""timestamp":{},"#, ts));
        }
        json.push_str(&format!(r#""level":"{}","#, self.level.as_str()));
        json.push_str(&format!(r#""message":"{}","#, Self::escape_json(&self.message)));
        json.push_str(&format!(r#""service":"{}","#, Self::escape_json(&config.service_name)));
        json.push_str(&format!(r#""environment":"{}","#, Self::escape_json(&config.environment)));
        if let Some(ref target) = self.target {
            json.push_str(&format!(r#""target":"{}","#, Self::escape_json(target)));
        }
        if config.include_location {
            if let Some(ref file) = self.file {
                json.push_str(&format!(r#""file":"{}","#, Self::escape_json(file)));
            }
            if let Some(line) = self.line {
                json.push_str(&format!(r#""line":{},"#, line));
            }
        }
        if config.include_thread_id {
            if let Some(tid) = self.thread_id {
                json.push_str(&format!(r#""thread_id":{},"#, tid));
            }
            if let Some(ref name) = self.thread_name {
                json.push_str(&format!(r#""thread_name":"{}","#, Self::escape_json(name)));
            }
        }
        if config.trace_correlation {
            if let Some(trace_id) = self.trace_id {
                json.push_str(&format!(r#""trace_id":"{:032x}","#, trace_id.0));
            }
            if let Some(span_id) = self.span_id {
                json.push_str(&format!(r#""span_id":"{:016x}","#, span_id.0));
            }
        }
        for (k, v) in &config.global_fields {
            json.push_str(&format!(
                r#""{}":"{}","#,
                Self::escape_json(k),
                Self::escape_json(v)
            ));
        }
        if !self.fields.is_empty() {
            json.push_str(r#""fields":{"#);
            let mut first = true;
            for (k, v) in &self.fields {
                if !first {
                    json.push(',');
                }
                first = false;
                let key = Self::escape_json(k);
                match v {
                    LogValue::String(s) => {
                        json.push_str(&format!(r#""{}":"{}""#, key, Self::escape_json(s)));
                    }
                    // BUG FIX: these arms previously used a format string with
                    // a stray trailing quote, emitting e.g. `"count":42"` —
                    // invalid JSON.
                    LogValue::Int(i) => json.push_str(&format!(r#""{}":{}"#, key, i)),
                    LogValue::Uint(u) => json.push_str(&format!(r#""{}":{}"#, key, u)),
                    LogValue::Float(f) => json.push_str(&format!(r#""{}":{}"#, key, f)),
                    LogValue::Bool(b) => json.push_str(&format!(r#""{}":{}"#, key, b)),
                }
            }
            json.push_str("},");
        }
        // Drop the trailing comma left by whichever section wrote last.
        if json.ends_with(',') {
            json.pop();
        }
        json.push('}');
        json
    }

    /// Render the entry as a human-readable single line according to `config`.
    pub fn to_text(&self, config: &LogConfig) -> String {
        let mut text = String::with_capacity(256);
        if config.include_timestamps {
            let ts = self
                .timestamp
                .duration_since(UNIX_EPOCH)
                .map(|d| {
                    let secs = d.as_secs();
                    // BUG FIX: the date was previously derived with 365-day
                    // years and 30-day months, yielding wrong dates (and month
                    // values up to 13); use a real calendar conversion.
                    let (year, month, day) = Self::civil_from_days((secs / 86_400) as i64);
                    format!(
                        "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}.{:03}Z",
                        year,
                        month,
                        day,
                        (secs % 86_400) / 3_600,
                        (secs % 3_600) / 60,
                        secs % 60,
                        d.subsec_millis()
                    )
                })
                .unwrap_or_else(|_| "1970-01-01T00:00:00.000Z".to_string());
            text.push_str(&ts);
            text.push(' ');
        }
        text.push_str(&format!("{:5} ", self.level.as_str()));
        if let Some(ref target) = self.target {
            text.push_str(&format!("[{}] ", target));
        }
        if config.trace_correlation {
            if let Some(trace_id) = self.trace_id {
                text.push_str(&format!("trace={:032x} ", trace_id.0));
            }
        }
        text.push_str(&self.message);
        if !self.fields.is_empty() {
            text.push_str(" {");
            let mut first = true;
            for (k, v) in &self.fields {
                if !first {
                    text.push_str(", ");
                }
                first = false;
                text.push_str(&format!("{}={}", k, v));
            }
            text.push('}');
        }
        text
    }
}
/// Thread-safe structured logger: filters by level, formats entries per
/// its config, and fans them out to registered sinks.
pub struct StructuredLogger {
    config: RwLock<LogConfig>,
    /// Trace context attached to every entry this logger emits.
    context: RwLock<TraceContext>,
    /// Total records emitted (counted after level/enabled filtering).
    log_count: AtomicU64,
    /// Records at `Error` level or above.
    error_count: AtomicU64,
    /// Global on/off switch checked before any work in `log`.
    enabled: AtomicBool,
    /// Creation time, used to report uptime in `stats`.
    start_time: Instant,
    /// Output destinations; when empty, `log` falls back to stderr.
    sinks: RwLock<Vec<Arc<dyn LogSink>>>,
}
impl StructuredLogger {
    /// Create a logger with `config`, no sinks, and logging enabled.
    pub fn new(config: LogConfig) -> Self {
        Self {
            config: RwLock::new(config),
            context: RwLock::new(TraceContext::new()),
            log_count: AtomicU64::new(0),
            error_count: AtomicU64::new(0),
            enabled: AtomicBool::new(true),
            start_time: Instant::now(),
            sinks: RwLock::new(vec![]),
        }
    }
    /// Logger using `LogConfig::default()`.
    pub fn default_logger() -> Self {
        Self::new(LogConfig::default())
    }
    /// Logger using the development preset.
    pub fn development() -> Self {
        Self::new(LogConfig::development())
    }
    /// Logger using the production preset.
    pub fn production() -> Self {
        Self::new(LogConfig::production())
    }
    /// Turn logging on or off globally; `log` is a no-op while disabled.
    pub fn set_enabled(&self, enabled: bool) {
        self.enabled.store(enabled, Ordering::SeqCst);
    }
    /// Whether logging is currently enabled.
    pub fn is_enabled(&self) -> bool {
        self.enabled.load(Ordering::SeqCst)
    }
    /// Replace the configuration at runtime.
    pub fn set_config(&self, config: LogConfig) {
        *self.config.write() = config;
    }
    /// Snapshot (clone) of the current configuration.
    pub fn config(&self) -> LogConfig {
        self.config.read().clone()
    }
    /// Replace the trace context attached to future entries.
    pub fn set_context(&self, context: TraceContext) {
        *self.context.write() = context;
    }
    /// Snapshot (clone) of the current trace context.
    pub fn context(&self) -> TraceContext {
        self.context.read().clone()
    }
    /// Install a fresh trace context (new trace and span ids) and return it.
    pub fn start_trace(&self) -> TraceContext {
        let ctx = TraceContext::with_new_trace();
        *self.context.write() = ctx.clone();
        ctx
    }
    /// Register an additional output destination.
    pub fn add_sink(&self, sink: Arc<dyn LogSink>) {
        self.sinks.write().push(sink);
    }
    /// Core logging entry point: filter, build the entry, format it once,
    /// and deliver it to every sink (or stderr when no sinks are attached).
    ///
    /// Filtering uses only the global `config.level`; per-module overrides
    /// are not consulted here because no target/module is supplied.
    /// Sink write errors are deliberately ignored.
    pub fn log(&self, level: LogLevel, message: &str, fields: &[(&str, &str)]) {
        if !self.enabled.load(Ordering::SeqCst) {
            return;
        }
        let config = self.config.read();
        if level < config.level {
            return;
        }
        // Attach the logger's current trace context to the entry.
        let ctx = self.context.read();
        let mut entry = LogEntry::new(level, message).with_trace_context(&ctx);
        for (k, v) in fields {
            entry = entry.with_field(*k, *v);
        }
        // Counters only reflect records that passed the filters above.
        self.log_count.fetch_add(1, Ordering::Relaxed);
        if level >= LogLevel::Error {
            self.error_count.fetch_add(1, Ordering::Relaxed);
        }
        let output = match config.output {
            LogOutput::Json => entry.to_json(&config),
            // Compact and Pretty currently share the plain-text renderer.
            LogOutput::Text | LogOutput::Compact | LogOutput::Pretty => entry.to_text(&config),
        };
        // Release the config read lock before touching the sink list.
        drop(config);
        let sinks = self.sinks.read();
        for sink in sinks.iter() {
            let _ = sink.write(&entry, &output);
        }
        if sinks.is_empty() {
            // Fallback so records are never silently lost.
            let _ = writeln!(std::io::stderr(), "{}", output);
        }
    }
    /// Log at `Trace` level.
    pub fn trace(&self, message: &str, fields: &[(&str, &str)]) {
        self.log(LogLevel::Trace, message, fields);
    }
    /// Log at `Debug` level.
    pub fn debug(&self, message: &str, fields: &[(&str, &str)]) {
        self.log(LogLevel::Debug, message, fields);
    }
    /// Log at `Info` level.
    pub fn info(&self, message: &str, fields: &[(&str, &str)]) {
        self.log(LogLevel::Info, message, fields);
    }
    /// Log at `Warn` level.
    pub fn warn(&self, message: &str, fields: &[(&str, &str)]) {
        self.log(LogLevel::Warn, message, fields);
    }
    /// Log at `Error` level.
    pub fn error(&self, message: &str, fields: &[(&str, &str)]) {
        self.log(LogLevel::Error, message, fields);
    }
    /// Log at `Fatal` level.
    pub fn fatal(&self, message: &str, fields: &[(&str, &str)]) {
        self.log(LogLevel::Fatal, message, fields);
    }
    /// Snapshot of counters, uptime, and sink count.
    pub fn stats(&self) -> LoggerStats {
        LoggerStats {
            log_count: self.log_count.load(Ordering::Relaxed),
            error_count: self.error_count.load(Ordering::Relaxed),
            uptime: self.start_time.elapsed(),
            sink_count: self.sinks.read().len(),
        }
    }
}
impl Default for StructuredLogger {
fn default() -> Self {
Self::default_logger()
}
}
/// Point-in-time statistics reported by `StructuredLogger::stats`.
#[derive(Debug, Clone)]
pub struct LoggerStats {
    /// Total records emitted (after level/enabled filtering).
    pub log_count: u64,
    /// Records at `Error` level or above.
    pub error_count: u64,
    /// Time elapsed since the logger was created.
    pub uptime: std::time::Duration,
    /// Number of registered sinks.
    pub sink_count: usize,
}
/// Destination for formatted log output. Implementations must be
/// thread-safe; the logger calls `write` with both the structured entry
/// and its pre-rendered string form.
pub trait LogSink: Send + Sync {
    /// Deliver one record; `formatted` is the already-rendered line.
    fn write(&self, entry: &LogEntry, formatted: &str) -> Result<(), LogSinkError>;
    /// Flush any buffered output; the default is a no-op.
    fn flush(&self) -> Result<(), LogSinkError> {
        Ok(())
    }
    /// Short identifier for diagnostics (e.g. "memory", "console:stderr").
    fn name(&self) -> &str;
}
/// Error returned by sink operations (`write`/`flush`).
#[derive(Debug)]
pub struct LogSinkError {
    pub message: String,
}

impl fmt::Display for LogSinkError {
    /// Formats as `LogSinkError: <message>`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("LogSinkError: ")?;
        f.write_str(&self.message)
    }
}

impl std::error::Error for LogSinkError {}
/// Sink that writes each formatted record to stdout or stderr.
pub struct ConsoleSink {
    use_stderr: bool,
}

impl ConsoleSink {
    /// Defaults to stderr.
    pub fn new() -> Self {
        Self::stderr()
    }

    /// Sink targeting standard output.
    pub fn stdout() -> Self {
        Self { use_stderr: false }
    }

    /// Sink targeting standard error.
    pub fn stderr() -> Self {
        Self { use_stderr: true }
    }
}

impl Default for ConsoleSink {
    fn default() -> Self {
        Self::new()
    }
}

impl LogSink for ConsoleSink {
    fn write(&self, _entry: &LogEntry, formatted: &str) -> Result<(), LogSinkError> {
        let outcome = if self.use_stderr {
            writeln!(std::io::stderr(), "{}", formatted)
        } else {
            writeln!(std::io::stdout(), "{}", formatted)
        };
        outcome.map_err(|e| LogSinkError {
            message: e.to_string(),
        })
    }

    fn name(&self) -> &str {
        match self.use_stderr {
            true => "console:stderr",
            false => "console:stdout",
        }
    }
}
/// In-memory buffer of formatted log lines, retaining at most `capacity`
/// entries (oldest evicted first). Mainly useful in tests.
pub struct MemoryLogSink {
    // VecDeque makes evicting the oldest line O(1) instead of the O(n)
    // front-shift that `Vec::remove(0)` performed.
    logs: RwLock<std::collections::VecDeque<String>>,
    capacity: usize,
}

impl MemoryLogSink {
    /// Create a sink retaining at most `capacity` lines. A capacity of 0
    /// discards every line.
    pub fn new(capacity: usize) -> Self {
        Self {
            logs: RwLock::new(std::collections::VecDeque::with_capacity(capacity)),
            capacity,
        }
    }

    /// Snapshot of the retained lines, oldest first.
    pub fn logs(&self) -> Vec<String> {
        self.logs.read().iter().cloned().collect()
    }

    /// Drop all retained lines.
    pub fn clear(&self) {
        self.logs.write().clear();
    }

    /// Number of retained lines.
    pub fn len(&self) -> usize {
        self.logs.read().len()
    }

    /// True when nothing is retained.
    pub fn is_empty(&self) -> bool {
        self.logs.read().is_empty()
    }
}

impl LogSink for MemoryLogSink {
    fn write(&self, _entry: &LogEntry, formatted: &str) -> Result<(), LogSinkError> {
        // BUG FIX: with capacity 0 the old code called `remove(0)` on an
        // empty Vec and panicked on the first write; a zero-capacity sink
        // now simply drops every line.
        if self.capacity == 0 {
            return Ok(());
        }
        let mut logs = self.logs.write();
        if logs.len() >= self.capacity {
            logs.pop_front();
        }
        logs.push_back(formatted.to_string());
        Ok(())
    }

    fn name(&self) -> &str {
        "memory"
    }
}
/// Sink that appends each formatted line to a file on disk.
pub struct FileLogSink {
    path: String,
    file: RwLock<Option<std::fs::File>>,
}

impl FileLogSink {
    /// Open (creating if necessary) `path` in append mode.
    ///
    /// # Errors
    /// Returns a [`LogSinkError`] when the file cannot be opened.
    pub fn new(path: impl Into<String>) -> Result<Self, LogSinkError> {
        let path = path.into();
        match std::fs::OpenOptions::new()
            .create(true)
            .append(true)
            .open(&path)
        {
            Ok(file) => Ok(Self {
                path,
                file: RwLock::new(Some(file)),
            }),
            Err(e) => Err(LogSinkError {
                message: format!("Failed to open log file: {}", e),
            }),
        }
    }
}

impl LogSink for FileLogSink {
    fn write(&self, _entry: &LogEntry, formatted: &str) -> Result<(), LogSinkError> {
        let mut guard = self.file.write();
        match *guard {
            Some(ref mut file) => writeln!(file, "{}", formatted).map_err(|e| LogSinkError {
                message: e.to_string(),
            }),
            // No open handle: report success, matching the original behavior.
            None => Ok(()),
        }
    }

    fn flush(&self) -> Result<(), LogSinkError> {
        let mut guard = self.file.write();
        match *guard {
            Some(ref mut file) => file.flush().map_err(|e| LogSinkError {
                message: e.to_string(),
            }),
            None => Ok(()),
        }
    }

    fn name(&self) -> &str {
        &self.path
    }
}
use std::sync::OnceLock;
/// Process-wide logger instance, initialized at most once.
static GLOBAL_LOGGER: OnceLock<StructuredLogger> = OnceLock::new();
/// Install the global logger with `config`. If the global logger was
/// already initialized, this is a no-op (the `set` result is ignored).
pub fn init(config: LogConfig) {
    let _ = GLOBAL_LOGGER.set(StructuredLogger::new(config));
}
/// Access the global logger, lazily creating a default one if `init`
/// was never called.
pub fn logger() -> &'static StructuredLogger {
    GLOBAL_LOGGER.get_or_init(StructuredLogger::default_logger)
}
/// Log at `Trace` level via the global logger.
pub fn trace(message: &str, fields: &[(&str, &str)]) {
    logger().trace(message, fields);
}
/// Log at `Debug` level via the global logger.
pub fn debug(message: &str, fields: &[(&str, &str)]) {
    logger().debug(message, fields);
}
/// Log at `Info` level via the global logger.
pub fn info(message: &str, fields: &[(&str, &str)]) {
    logger().info(message, fields);
}
/// Log at `Warn` level via the global logger.
pub fn warn(message: &str, fields: &[(&str, &str)]) {
    logger().warn(message, fields);
}
/// Log at `Error` level via the global logger.
pub fn error(message: &str, fields: &[(&str, &str)]) {
    logger().error(message, fields);
}
/// Log at `Fatal` level via the global logger.
pub fn fatal(message: &str, fields: &[(&str, &str)]) {
    logger().fatal(message, fields);
}
// Unit tests for levels, config resolution, entry rendering, the logger
// pipeline, and the bundled sinks.
#[cfg(test)]
mod tests {
    use super::*;
    // Levels must order by severity for `level < config.level` filtering.
    #[test]
    fn test_log_level_ordering() {
        assert!(LogLevel::Trace < LogLevel::Debug);
        assert!(LogLevel::Debug < LogLevel::Info);
        assert!(LogLevel::Info < LogLevel::Warn);
        assert!(LogLevel::Warn < LogLevel::Error);
        assert!(LogLevel::Error < LogLevel::Fatal);
    }
    // Parsing is case-insensitive and accepts WARNING/CRITICAL aliases.
    #[test]
    fn test_log_level_from_str() {
        assert_eq!(LogLevel::parse("trace"), Some(LogLevel::Trace));
        assert_eq!(LogLevel::parse("DEBUG"), Some(LogLevel::Debug));
        assert_eq!(LogLevel::parse("Info"), Some(LogLevel::Info));
        assert_eq!(LogLevel::parse("WARNING"), Some(LogLevel::Warn));
        assert_eq!(LogLevel::parse("error"), Some(LogLevel::Error));
        assert_eq!(LogLevel::parse("FATAL"), Some(LogLevel::Fatal));
        assert_eq!(LogLevel::parse("CRITICAL"), Some(LogLevel::Fatal));
        assert_eq!(LogLevel::parse("invalid"), None);
    }
    #[test]
    fn test_log_config_builder() {
        let config = LogConfig::builder()
            .level(LogLevel::Debug)
            .output(LogOutput::Json)
            .with_trace_correlation(true)
            .with_timestamps(true)
            .with_location(true)
            .service_name("test-service")
            .environment("test")
            .module_level("ringkernel::k2k", LogLevel::Trace)
            .global_field("version", "1.0.0")
            .build();
        assert_eq!(config.level, LogLevel::Debug);
        assert_eq!(config.output, LogOutput::Json);
        assert!(config.trace_correlation);
        assert!(config.include_timestamps);
        assert!(config.include_location);
        assert_eq!(config.service_name, "test-service");
        assert_eq!(config.environment, "test");
        assert_eq!(
            config.effective_level("ringkernel::k2k::broker"),
            LogLevel::Trace
        );
    }
    // Exact match wins, then the longest matching prefix, then the global level.
    #[test]
    fn test_log_config_effective_level() {
        let config = LogConfig::builder()
            .level(LogLevel::Info)
            .module_level("ringkernel", LogLevel::Debug)
            .module_level("ringkernel::k2k", LogLevel::Trace)
            .build();
        assert_eq!(config.effective_level("other::module"), LogLevel::Info);
        assert_eq!(config.effective_level("ringkernel::core"), LogLevel::Debug);
        assert_eq!(config.effective_level("ringkernel::k2k"), LogLevel::Trace);
        assert_eq!(
            config.effective_level("ringkernel::k2k::broker"),
            LogLevel::Trace
        );
    }
    #[test]
    fn test_trace_context() {
        let ctx = TraceContext::with_new_trace()
            .with_field("user_id", "123")
            .with_field("request_id", "abc");
        assert!(ctx.trace_id.is_some());
        assert!(ctx.span_id.is_some());
        assert_eq!(ctx.fields.get("user_id"), Some(&"123".to_string()));
    }
    #[test]
    fn test_log_entry_json() {
        let config = LogConfig::builder()
            .service_name("test")
            .environment("dev")
            .with_timestamps(false)
            .with_trace_correlation(false)
            .build();
        let entry = LogEntry::new(LogLevel::Info, "Test message").with_field("key", "value");
        let json = entry.to_json(&config);
        assert!(json.contains(r#""level":"INFO""#));
        assert!(json.contains(r#""message":"Test message""#));
        assert!(json.contains(r#""service":"test""#));
    }
    #[test]
    fn test_log_entry_text() {
        let config = LogConfig::builder()
            .with_timestamps(false)
            .with_trace_correlation(false)
            .build();
        let entry = LogEntry::new(LogLevel::Warn, "Warning!").with_target("test::module");
        let text = entry.to_text(&config);
        assert!(text.contains("WARN"));
        assert!(text.contains("[test::module]"));
        assert!(text.contains("Warning!"));
    }
    // Trace is below the Debug threshold, so only two records reach the sink.
    #[test]
    fn test_structured_logger() {
        let logger = StructuredLogger::new(LogConfig::builder().level(LogLevel::Debug).build());
        let sink = Arc::new(MemoryLogSink::new(100));
        logger.add_sink(sink.clone());
        logger.info("Test message", &[("key", "value")]);
        logger.debug("Debug message", &[]);
        logger.trace("Trace message", &[]);
        assert_eq!(sink.len(), 2);
    }
    // Oldest entry is evicted once the ring buffer is full.
    #[test]
    fn test_memory_sink_capacity() {
        let sink = MemoryLogSink::new(3);
        let entry = LogEntry::new(LogLevel::Info, "msg");
        sink.write(&entry, "log1").unwrap();
        sink.write(&entry, "log2").unwrap();
        sink.write(&entry, "log3").unwrap();
        sink.write(&entry, "log4").unwrap();
        let logs = sink.logs();
        assert_eq!(logs.len(), 3);
        assert_eq!(logs[0], "log2");
        assert_eq!(logs[2], "log4");
    }
    #[test]
    fn test_logger_stats() {
        let logger = StructuredLogger::new(LogConfig::default());
        let sink = Arc::new(MemoryLogSink::new(100));
        logger.add_sink(sink);
        logger.info("info", &[]);
        logger.error("error", &[]);
        logger.warn("warn", &[]);
        let stats = logger.stats();
        assert_eq!(stats.log_count, 3);
        assert_eq!(stats.error_count, 1);
        assert_eq!(stats.sink_count, 1);
    }
    // Records emitted while disabled are dropped entirely.
    #[test]
    fn test_logger_disable() {
        let logger = StructuredLogger::new(LogConfig::default());
        let sink = Arc::new(MemoryLogSink::new(100));
        logger.add_sink(sink.clone());
        logger.info("before", &[]);
        logger.set_enabled(false);
        logger.info("during", &[]);
        logger.set_enabled(true);
        logger.info("after", &[]);
        assert_eq!(sink.len(), 2);
    }
    #[test]
    fn test_log_value_display() {
        assert_eq!(LogValue::String("test".to_string()).to_string(), "test");
        assert_eq!(LogValue::Int(-42).to_string(), "-42");
        assert_eq!(LogValue::Uint(42).to_string(), "42");
        assert_eq!(LogValue::Bool(true).to_string(), "true");
    }
    #[test]
    fn test_console_sink() {
        let sink = ConsoleSink::stderr();
        assert_eq!(sink.name(), "console:stderr");
        let sink = ConsoleSink::stdout();
        assert_eq!(sink.name(), "console:stdout");
    }
    #[test]
    fn test_log_config_presets() {
        let dev = LogConfig::development();
        assert_eq!(dev.level, LogLevel::Debug);
        assert_eq!(dev.output, LogOutput::Pretty);
        assert!(dev.include_location);
        let prod = LogConfig::production();
        assert_eq!(prod.level, LogLevel::Info);
        assert_eq!(prod.output, LogOutput::Json);
        assert!(prod.include_thread_id);
    }
}