pub use self::core::{
ClusterOperationCounters, EdgeOperationMetrics, FreeSpaceOperationMetrics, GlobalCounters,
NodeOperationMetrics, StringTableOperationMetrics, V2WALMetrics, WALPerformanceCounters,
};
pub use self::aggregation::{LatencyHistogram, ThroughputTracker};
pub use self::reporting::{
ClusterGlobalMetrics, ClusterMetrics, ClusterPerformanceMetrics, ErrorEntry, ErrorTracker,
MetricsReport, ResourceTracker,
};
pub use self::analysis::{
ImplementationDifficulty, IssueSeverity, OptimizationOpportunity, PerformanceAnalysis,
PerformanceAnalyzer, PerformanceCategoryScores, PerformanceIssue, PerformanceTrend,
Recommendation, RecommendationPriority, TrendDirection,
};
pub mod core;
pub mod collection;
pub mod aggregation;
pub mod reporting;
pub mod analysis;
/// Version identifier for this metrics module's reporting format.
pub const METRICS_VERSION: &str = "1.0.0";
/// Default configuration values for the metrics subsystem.
///
/// NOTE(review): the consumers/units of most of these constants live in the
/// submodules (`aggregation`, `reporting`) — semantics below are inferred from
/// the names and should be confirmed there.
pub mod defaults {
    // Window size for throughput tracking (presumably seconds — confirm in `aggregation`).
    pub const DEFAULT_THROUGHPUT_WINDOW: usize = 60;
    // Number of buckets used by latency histograms.
    pub const DEFAULT_LATENCY_BUCKETS: usize = 10;
    // Cap on how many recent errors the error tracker retains.
    pub const DEFAULT_MAX_RECENT_ERRORS: usize = 1000;
    // Buffer utilization (percent) at or above which
    // `utils::check_performance_health` reports the buffer as unhealthy.
    pub const DEFAULT_BUFFER_UTILIZATION_THRESHOLD: f64 = 80.0;
    // Percentiles reported in latency summaries (p50 / p95 / p99).
    pub const DEFAULT_LATENCY_PERCENTILES: &[f64] = &[50.0, 95.0, 99.0];
}
pub mod utils {
use super::*;
/// Create a [`V2WALMetrics`] instance with default settings.
///
/// Thin convenience wrapper around [`V2WALMetrics::new`].
pub fn create_default_metrics() -> V2WALMetrics {
    V2WALMetrics::new()
}
/// Create a [`PerformanceAnalyzer`] instance with default settings.
///
/// Thin convenience wrapper around [`PerformanceAnalyzer::new`].
pub fn create_default_analyzer() -> PerformanceAnalyzer {
    PerformanceAnalyzer::new()
}
/// Render a human-readable, fixed-layout snapshot of the WAL's performance.
///
/// Gathers the performance counters, global counters, resource tracker,
/// cluster metrics, and error tracker from `metrics` and formats them into a
/// multi-line textual report.
pub fn generate_performance_report(metrics: &V2WALMetrics) -> String {
    let perf = metrics.get_counters();
    let globals = metrics.get_global_counters();
    let resources = metrics.get_resource_tracker();
    let cluster = metrics.get_cluster_metrics();
    let errors = metrics.get_error_tracker();

    // Bytes are reported in whole megabytes.
    let transferred_mb = perf.bytes_transferred / (1024 * 1024);

    format!(
        "=== V2 WAL Performance Report ===\n\
         Records Processed: {}\n\
         Bytes Transferred: {} MB\n\
         Records Written: {}\n\
         Records Read: {}\n\
         Buffer Utilization: {:.1}%\n\
         {}\n\
         {}\n\
         Errors: {}\n\
         ===============================",
        perf.records_processed,
        transferred_mb,
        globals.0,
        globals.1,
        perf.buffer_utilization_percent,
        resources.get_summary(),
        cluster.get_summary(),
        errors.get_summary()
    )
}
/// Quick health check over the current metrics snapshot.
///
/// Returns `(is_healthy, description)`: the description either confirms that
/// all checks passed or enumerates each failing check.
pub fn check_performance_health(metrics: &V2WALMetrics) -> (bool, String) {
    let counters = metrics.get_counters();
    let resources = metrics.get_resource_tracker();
    let errors = metrics.get_error_tracker();

    // Error rate as a percentage of processed records (0 when nothing processed).
    let total_errors: u64 = errors.error_counts.values().sum();
    let error_rate = if counters.records_processed > 0 {
        (total_errors as f64 / counters.records_processed as f64) * 100.0
    } else {
        0.0
    };

    // Relax the error-rate threshold while the sample size is still small.
    let error_threshold = if counters.records_processed < 100 {
        50.0
    } else {
        1.0
    };

    let buffer_healthy =
        counters.buffer_utilization_percent < defaults::DEFAULT_BUFFER_UTILIZATION_THRESHOLD;
    // 1 GiB memory ceiling.
    let memory_healthy = resources.memory_usage_bytes < 1024 * 1024 * 1024;
    let error_healthy = error_rate < error_threshold;

    if buffer_healthy && memory_healthy && error_healthy {
        return (true, "All metrics within acceptable ranges".to_string());
    }

    // Collect one message per failing check.
    let mut problems = Vec::new();
    if !buffer_healthy {
        problems.push(format!(
            "High buffer utilization: {:.1}%",
            counters.buffer_utilization_percent
        ));
    }
    if !memory_healthy {
        problems.push(format!(
            "High memory usage: {} MB",
            resources.memory_usage_bytes / (1024 * 1024)
        ));
    }
    if !error_healthy {
        problems.push(format!("High error rate: {:.2}%", error_rate));
    }
    (false, format!("Issues detected: {}", problems.join(", ")))
}
}
#[cfg(test)]
mod integration_tests {
use super::core::V2WALMetrics;
use super::*;
#[test]
fn test_full_metrics_workflow() {
    let metrics = utils::create_default_metrics();

    // Record a small mixed workload: two writes, one read, one error.
    metrics.record_write_operation(100, 50, Some(42), "edge_insert");
    metrics.record_write_operation(150, 75, Some(43), "node_insert");
    metrics.record_read_operation(80, 25, Some(42), "edge_read");
    metrics.record_error(
        "TestError",
        "Test message",
        "test_operation",
        "test_recovery",
    );

    // Per-category counters reflect every recorded operation.
    let counters = metrics.get_counters();
    assert_eq!(counters.records_processed, 3);
    assert_eq!(counters.edge_operations.total_inserts, 1);
    assert_eq!(counters.node_operations.total_inserts, 1);

    // Global counters: field 0 tracks writes, field 1 tracks reads.
    let globals = metrics.get_global_counters();
    assert_eq!(globals.0, 2);
    assert_eq!(globals.1, 1);

    // Resource tracking picked up some memory usage from the workload.
    let resources = metrics.get_resource_tracker();
    assert!(resources.memory_usage_bytes > 0);

    // The recorded error is counted under its error type.
    let errors = metrics.get_error_tracker();
    assert_eq!(errors.error_counts.get("TestError"), Some(&1));

    // Analysis yields a bounded overall score.
    let analysis = utils::create_default_analyzer().analyze(&metrics);
    assert!((0.0..=100.0).contains(&analysis.overall_score));

    // The textual report reflects the recorded workload.
    let report = utils::generate_performance_report(&metrics);
    assert!(report.contains("Records Processed: 3"));
    assert!(report.contains("Records Written: 2"));

    // Such a light workload should report as healthy.
    let (healthy, description) = utils::check_performance_health(&metrics);
    assert!(healthy);
    assert!(description.contains("acceptable ranges"));
}
#[test]
fn test_backward_compatibility() {
    let metrics = V2WALMetrics::new();

    // Legacy getters must be callable on a fresh instance. Underscore-bound:
    // only constructibility is exercised here, matching the convention used
    // for the accessor checks below (was previously unbound, causing
    // unused-variable warnings).
    let _initial_counters = metrics.get_counters();
    let _initial_global_counters = metrics.get_global_counters();

    metrics.record_write_operation(100, 50, Some(42), "edge_insert");
    metrics.record_read_operation(80, 25, Some(42), "edge_read");
    metrics.record_error("TestError", "Test message", "test", "recovery");

    // Every legacy accessor is still available after recording activity.
    let _latency_histogram = metrics.get_latency_histogram();
    let _throughput_tracker = metrics.get_throughput_tracker();
    let _resource_tracker = metrics.get_resource_tracker();
    let _cluster_metrics = metrics.get_cluster_metrics();
    let _error_tracker = metrics.get_error_tracker();

    // Counters reflect the one write and one read recorded above.
    let updated_counters = metrics.get_counters();
    assert_eq!(updated_counters.records_processed, 2);
    assert_eq!(updated_counters.edge_operations.total_inserts, 1);

    let updated_global = metrics.get_global_counters();
    assert_eq!(updated_global.0, 1);
    assert_eq!(updated_global.1, 1);
}
#[test]
fn test_modular_api_access() {
    use super::aggregation::{LatencyHistogram, ThroughputTracker};
    use super::analysis::{PerformanceAnalysis, PerformanceAnalyzer};
    use super::reporting::{ClusterPerformanceMetrics, ErrorTracker, ResourceTracker};

    // Analysis types are reachable through the `analysis` submodule.
    let metrics = V2WALMetrics::new();
    let analyzer = PerformanceAnalyzer::new();
    let analysis: PerformanceAnalysis = analyzer.analyze(&metrics);
    assert!(analysis.overall_score >= 0.0);

    // Aggregation/reporting types can all be constructed directly.
    // Underscore-bound where only constructibility is verified (previously
    // unprefixed, causing unused-variable warnings).
    let latency_histogram = LatencyHistogram::new();
    let throughput_tracker = ThroughputTracker::new();
    let _resource_tracker = ResourceTracker::new();
    let _cluster_metrics = ClusterPerformanceMetrics::new();
    let _error_tracker = ErrorTracker::new();

    // Fresh aggregators report zeroed statistics.
    assert_eq!(latency_histogram.get_write_percentile(50.0), 0);
    let (writes, reads, txs) = throughput_tracker.get_current_throughput();
    assert_eq!(writes, 0.0);
    assert_eq!(reads, 0.0);
    assert_eq!(txs, 0.0);
}
#[test]
fn test_metrics_configuration() {
    assert_eq!(METRICS_VERSION, "1.0.0");
    assert_eq!(defaults::DEFAULT_THROUGHPUT_WINDOW, 60);
    assert_eq!(defaults::DEFAULT_LATENCY_BUCKETS, 10);
    assert_eq!(defaults::DEFAULT_MAX_RECENT_ERRORS, 1000);
    assert_eq!(defaults::DEFAULT_BUFFER_UTILIZATION_THRESHOLD, 80.0);
    // Pin the actual percentile values, not just the count — the previous
    // length-only check would miss a silent change of the percentiles.
    assert_eq!(defaults::DEFAULT_LATENCY_PERCENTILES, &[50.0, 95.0, 99.0]);
}
#[test]
fn test_utility_functions() {
    let metrics = utils::create_default_metrics();
    // Underscore-bound: only constructibility of the analyzer is exercised
    // here (previously unprefixed, causing an unused-variable warning).
    let _analyzer = utils::create_default_analyzer();

    metrics.record_write_operation(100, 50, Some(42), "edge_insert");

    // The report has the expected header and reflects the recorded write.
    let report = utils::generate_performance_report(&metrics);
    assert!(report.contains("=== V2 WAL Performance Report ==="));
    assert!(report.contains("Records Processed: 1"));

    // A single light write should report healthy, with a non-empty summary.
    let (healthy, description) = utils::check_performance_health(&metrics);
    assert!(healthy);
    assert!(!description.is_empty());
}
#[test]
fn test_analysis_integration() {
    use super::analysis::PerformanceAnalyzer;

    let metrics = V2WALMetrics::new();
    let analyzer = PerformanceAnalyzer::new();

    // Sustained load of 1000 identical writes gives the analyzer enough data
    // to potentially flag issues.
    for _ in 0..1000 {
        metrics.record_write_operation(100, 2000, Some(42), "edge_insert");
    }

    let analysis = analyzer.analyze(&metrics);
    assert!(analysis.overall_score <= 100.0);

    // Critical issues are drawn from the full issue list, so they can never
    // outnumber it. (The previous assertion here — `!x.is_empty() ||
    // x.is_empty()` — was a tautology and verified nothing.)
    let critical_issues = analysis.get_critical_issues();
    assert!(critical_issues.len() <= analysis.issues.len());

    // The recommendation accessor must not panic regardless of load.
    let _immediate_recs = analysis.get_immediate_recommendations();
}
#[test]
fn test_serde_compatibility() {
    use super::core::WALPerformanceCounters;
    use super::reporting::{MetricsReport, ResourceTracker};

    // A report assembled from default components must survive a JSON round-trip.
    let report = MetricsReport {
        timestamp: 1234567890,
        performance_counters: WALPerformanceCounters::default(),
        resource_metrics: ResourceTracker::default(),
        cluster_metrics: ClusterPerformanceMetrics::default(),
        error_summary: std::collections::HashMap::new(),
        global_counters: (10, 20, 1024, 2048, 5),
    };

    let json_str =
        serde_json::to_string(&report).expect("MetricsReport should serialize to JSON");
    let _roundtripped: MetricsReport =
        serde_json::from_str(&json_str).expect("serialized MetricsReport should deserialize");
}
}