use crate::{
cli::{
error::{CliError, CliErrorKind},
output::OutputFormatter,
},
config::Config,
tools::performance::{MonitoringConfig, PerformanceMonitor, ProfilingResult},
};
use clap::{Args, Subcommand};
use std::{
    path::{Path, PathBuf},
    time::Duration,
};
use tokio::fs;
use tracing::{info, warn};
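/// Arguments for the `performance` command group.
///
/// Example invocation (the `oxirs` binary name is an assumption; use
/// whatever name your build produces):
///
/// ```text
/// oxirs performance monitor --interval 500 --duration 30 --format json
/// ```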
#[derive(Debug, Args)]
pub struct PerformanceArgs {
#[command(subcommand)]
pub command: PerformanceCommand,
}
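/// The performance subcommands; `execute` dispatches to the matching
/// argument struct.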
#[derive(Debug, Subcommand)]
pub enum PerformanceCommand {
Monitor(MonitorCommand),
Profile(ProfileCommand),
Compare(CompareCommand),
Health(HealthCommand),
Report(ReportCommand),
Optimizer(OptimizerCommand),
Advisor(AdvisorCommand),
Predictor(PredictorCommand),
}
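/// `performance monitor`: sample live system metrics at a fixed interval,
/// warning when the optional CPU/memory thresholds are exceeded. Runs until
/// Ctrl+C when `--continuous` is set or no `--duration` is given.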
#[derive(Debug, Args)]
pub struct MonitorCommand {
#[arg(short, long)]
pub duration: Option<u64>,
#[arg(short = 'I', long, default_value = "1000")]
pub interval: u64,
#[arg(short, long, default_value = "table")]
pub format: String,
#[arg(short = 's', long)]
pub save: Option<PathBuf>,
#[arg(short = 'C', long)]
pub continuous: bool,
#[arg(long)]
pub metrics: Option<Vec<String>>,
#[arg(long)]
pub cpu_threshold: Option<f32>,
#[arg(long)]
pub memory_threshold: Option<f32>,
}
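/// `performance profile`: profile a named operation with optional
/// checkpoints, per-metric tracking, and flame graph output.
///
/// A sketch of a typical invocation (the `oxirs` binary name is an
/// assumption):
///
/// ```text
/// oxirs performance profile -o index_build --detailed --flamegraph
/// ```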
#[derive(Debug, Args)]
pub struct ProfileCommand {
#[arg(short, long)]
pub operation: String,
pub command: Vec<String>,
#[arg(short, long, default_value = "table")]
pub format: String,
#[arg(short = 's', long)]
pub save: Option<PathBuf>,
#[arg(short, long)]
pub detailed: bool,
#[arg(long)]
pub metrics: Option<Vec<String>>,
#[arg(long)]
pub memory: bool,
#[arg(long)]
pub cpu: bool,
#[arg(long)]
pub io: bool,
#[arg(long)]
pub flamegraph: bool,
#[arg(long)]
pub flamegraph_output: Option<PathBuf>,
}
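/// `performance compare`: compare a baseline and a current profiling result
/// (both saved as JSON) and print ratios plus optional recommendations.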
#[derive(Debug, Args)]
pub struct CompareCommand {
#[arg(short, long)]
pub baseline: PathBuf,
#[arg(short = 'C', long)]
pub current: PathBuf,
#[arg(short, long, default_value = "table")]
pub format: String,
#[arg(short = 's', long)]
pub save: Option<PathBuf>,
#[arg(long, default_value = "5.0")]
pub threshold: f64,
#[arg(short, long)]
pub detailed: bool,
#[arg(long)]
pub recommendations: bool,
}
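/// `performance health`: run a one-shot or continuous system health check,
/// with optional recommendations.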
#[derive(Debug, Args)]
pub struct HealthCommand {
#[arg(short, long, default_value = "table")]
pub format: String,
#[arg(short, long)]
pub recommendations: bool,
#[arg(long)]
pub components: Option<Vec<String>>,
#[arg(long)]
pub thresholds: Option<PathBuf>,
#[arg(short = 'C', long)]
pub continuous: bool,
#[arg(long, default_value = "30")]
pub interval: u64,
}
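/// `performance report`: generate a performance report in HTML, PDF,
/// Markdown, or JSON; any other format value falls back to the standard
/// output formatter.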
#[derive(Debug, Args)]
pub struct ReportCommand {
#[arg(short, long, default_value = "24")]
pub period: u64,
#[arg(short, long, default_value = "html")]
pub format: String,
#[arg(short, long)]
pub output: Option<PathBuf>,
#[arg(short, long)]
pub graphs: bool,
#[arg(long)]
pub system_metrics: bool,
#[arg(long)]
pub profiling_data: bool,
#[arg(long)]
pub recommendations: bool,
#[arg(long)]
pub template: Option<PathBuf>,
}
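/// `performance optimizer`: analyze a dataset for optimization
/// opportunities.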
#[derive(Debug, Args)]
pub struct OptimizerCommand {
pub dataset: PathBuf,
#[arg(short, long, default_value = "text")]
pub format: String,
#[arg(short = 's', long)]
pub save: Option<PathBuf>,
#[arg(short, long)]
pub detailed: bool,
}
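/// `performance advisor`: analyze a SPARQL query for best practices. With
/// `--file`, the positional argument is treated as a path to a query file.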
#[derive(Debug, Args)]
pub struct AdvisorCommand {
pub query: String,
#[arg(short, long)]
pub file: bool,
#[arg(short = 'o', long, default_value = "text")]
pub format: String,
#[arg(short = 'V', long)]
pub verbose: bool,
#[arg(short = 's', long)]
pub save: Option<PathBuf>,
}
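/// `performance predictor`: predict SPARQL query performance with the
/// ML-based predictor, optionally supplying training data via `--train`.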
#[derive(Debug, Args)]
pub struct PredictorCommand {
pub query: String,
#[arg(short, long)]
pub file: bool,
#[arg(short, long)]
pub train: Option<PathBuf>,
#[arg(short = 'o', long, default_value = "text")]
pub format: String,
#[arg(short = 's', long)]
pub save: Option<PathBuf>,
#[arg(short, long)]
pub detailed: bool,
}
impl PerformanceCommand {
pub async fn execute(&self, config: &Config) -> Result<(), CliError> {
match self {
PerformanceCommand::Monitor(cmd) => cmd.execute(config).await,
PerformanceCommand::Profile(cmd) => cmd.execute(config).await,
PerformanceCommand::Compare(cmd) => cmd.execute(config).await,
PerformanceCommand::Health(cmd) => cmd.execute(config).await,
PerformanceCommand::Report(cmd) => cmd.execute(config).await,
PerformanceCommand::Optimizer(cmd) => cmd.execute(config).await,
PerformanceCommand::Advisor(cmd) => cmd.execute(config).await,
PerformanceCommand::Predictor(cmd) => cmd.execute(config).await,
}
}
}
impl MonitorCommand {
pub async fn execute(&self, _config: &Config) -> Result<(), CliError> {
info!("Starting performance monitoring");
let monitoring_config = MonitoringConfig {
enable_continuous_monitoring: true,
sampling_interval_ms: self.interval,
memory_tracking: true,
cpu_tracking: true,
io_tracking: true,
network_tracking: true,
auto_profiling: false,
profile_threshold_ms: 100,
max_sessions: 100,
};
let monitor = PerformanceMonitor::new(monitoring_config);
monitor.start_monitoring().await?;
if self.continuous || self.duration.is_none() {
info!("Running continuous monitoring (Ctrl+C to stop)");
let mut interval = tokio::time::interval(Duration::from_millis(self.interval));
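            // Race each sampling tick against Ctrl+C so the loop exits
            // cleanly on user interrupt.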
loop {
tokio::select! {
_ = interval.tick() => {
let report = monitor.generate_performance_report()?;
if let Some(cpu_threshold) = self.cpu_threshold {
if report.current_metrics.cpu_usage > cpu_threshold {
warn!("CPU usage ({:.1}%) exceeds threshold ({:.1}%)",
report.current_metrics.cpu_usage, cpu_threshold);
}
}
if let Some(memory_threshold) = self.memory_threshold {
let memory_percentage = (report.current_metrics.memory_usage as f64 /
report.current_metrics.memory_total as f64) * 100.0;
if memory_percentage > memory_threshold as f64 {
warn!("Memory usage ({:.1}%) exceeds threshold ({:.1}%)",
memory_percentage, memory_threshold);
}
}
let formatter = OutputFormatter::new(&self.format);
formatter.print_performance_report(&report)?;
if let Some(save_path) = &self.save {
let json_data = serde_json::to_string_pretty(&report)
.map_err(|e| CliError::serialization_error(e.to_string()))?;
                            fs::write(save_path, json_data).await.map_err(|e| {
                                CliError::io_error(std::io::Error::new(
                                    std::io::ErrorKind::Other,
                                    format!("Failed to save monitoring data: {e}"),
                                ))
                            })?;
}
}
_ = tokio::signal::ctrl_c() => {
info!("Monitoring stopped by user");
break;
}
}
}
} else {
            // `duration` is always Some on this branch; 60 s is only a
            // defensive fallback.
            let duration_secs = self.duration.unwrap_or(60);
            let duration = Duration::from_secs(duration_secs);
info!("Monitoring for {} seconds", duration.as_secs());
tokio::time::sleep(duration).await;
let report = monitor.generate_performance_report()?;
let formatter = OutputFormatter::new(&self.format);
formatter.print_performance_report(&report)?;
if let Some(save_path) = &self.save {
let json_data = serde_json::to_string_pretty(&report)
.map_err(|e| CliError::serialization_error(e.to_string()))?;
fs::write(save_path, json_data).await.map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to save report: {e}"),
))
})?;
info!("Report saved to {}", save_path.display());
}
}
Ok(())
}
}
impl ProfileCommand {
pub async fn execute(&self, _config: &Config) -> Result<(), CliError> {
info!("Starting profiling for operation: {}", self.operation);
let monitoring_config = MonitoringConfig {
enable_continuous_monitoring: true,
            sampling_interval_ms: 100,
            memory_tracking: self.memory,
cpu_tracking: self.cpu,
io_tracking: self.io,
network_tracking: true,
auto_profiling: true,
profile_threshold_ms: 10,
max_sessions: 50,
};
let monitor = PerformanceMonitor::new(monitoring_config);
monitor.start_monitoring().await?;
let session_id = monitor.start_profiling(&self.operation)?;
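        // Checkpoints label named phases within the session; they show up in
        // the report and as sub-frames in the generated flame graph.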
if self.detailed {
monitor.add_checkpoint(&session_id, "initialization")?;
}
if !self.command.is_empty() {
info!("Executing command: {}", self.command.join(" "));
if self.detailed {
monitor.add_checkpoint(&session_id, "command_start")?;
}
            // NOTE: execution is currently simulated; the command line is
            // logged but not actually spawned.
            let execution_duration = Duration::from_millis(500);
            tokio::time::sleep(execution_duration).await;
if self.detailed {
monitor.add_checkpoint(&session_id, "command_complete")?;
}
}
if self.detailed {
monitor.add_checkpoint(&session_id, "finalization")?;
}
let result = monitor.finish_profiling(&session_id)?;
let formatter = OutputFormatter::new(&self.format);
formatter.print_profiling_result(&result)?;
if let Some(save_path) = &self.save {
let json_data = serde_json::to_string_pretty(&result)
.map_err(|e| CliError::serialization_error(e.to_string()))?;
fs::write(save_path, json_data).await.map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to save profile: {e}"),
))
})?;
info!("Profiling results saved to {}", save_path.display());
}
if self.flamegraph {
let flamegraph_path = self.flamegraph_output.clone().unwrap_or_else(|| {
PathBuf::from(format!(
"profile_{}.svg",
chrono::Utc::now().format("%Y%m%d_%H%M%S")
))
});
generate_flamegraph(&result, &flamegraph_path).await?;
info!("Flame graph generated: {}", flamegraph_path.display());
}
println!("\nPerformance Summary:");
println!(" Operation: {}", result.operation_name);
println!(" Duration: {:.3}s", result.total_duration.as_secs_f64());
println!(
" Efficiency Score: {:.1}/100",
result.performance_summary.efficiency_score
);
println!(
" Memory Delta: {:.2} MB",
result.performance_summary.memory_delta_bytes as f64 / 1_000_000.0
);
println!(
" Average CPU: {:.1}%",
result.performance_summary.average_cpu_usage
);
Ok(())
}
}
impl CompareCommand {
pub async fn execute(&self, _config: &Config) -> Result<(), CliError> {
info!(
"Comparing benchmarks: {} vs {}",
self.baseline.display(),
self.current.display()
);
let baseline_data = fs::read_to_string(&self.baseline).await.map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to read baseline file: {e}"),
))
})?;
let baseline: ProfilingResult = serde_json::from_str(&baseline_data)
.map_err(|e| CliError::serialization_error(format!("Failed to parse baseline: {e}")))?;
let current_data = fs::read_to_string(&self.current).await.map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to read current file: {e}"),
))
})?;
        let current: ProfilingResult = serde_json::from_str(&current_data)
.map_err(|e| CliError::serialization_error(format!("Failed to parse current: {e}")))?;
let config = MonitoringConfig::default();
let monitor = PerformanceMonitor::new(config);
        let comparison = monitor.compare_benchmarks(&baseline, &current)?;
let formatter = OutputFormatter::new(&self.format);
formatter.print_benchmark_comparison(&comparison)?;
println!("\nComparison Summary:");
println!(" {}", comparison.improvement_summary);
println!(" Time Ratio: {:.3}x", comparison.time_ratio);
println!(" Memory Ratio: {:.3}x", comparison.memory_ratio);
println!(
" Overall Performance: {:.3}x",
comparison.performance_ratio
);
if self.recommendations {
println!("\nRecommendations:");
if comparison.performance_ratio < 0.9 {
println!(" ✅ Excellent performance improvement!");
println!(" ✅ Consider documenting the optimizations made");
} else if comparison.performance_ratio > 1.1 {
println!(" ⚠️ Performance regression detected");
println!(" ⚠️ Review recent changes for potential issues");
println!(" ⚠️ Consider profiling to identify bottlenecks");
} else {
println!(" ℹ️ Performance is comparable");
println!(" ℹ️ Consider micro-optimizations if needed");
}
}
if let Some(save_path) = &self.save {
let json_data = serde_json::to_string_pretty(&comparison)
.map_err(|e| CliError::serialization_error(e.to_string()))?;
fs::write(save_path, json_data).await.map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to save comparison: {e}"),
))
})?;
info!("Comparison report saved to {}", save_path.display());
}
Ok(())
}
}
impl HealthCommand {
pub async fn execute(&self, _config: &Config) -> Result<(), CliError> {
info!("Performing system health check");
let config = MonitoringConfig::default();
let monitor = PerformanceMonitor::new(config);
monitor.start_monitoring().await?;
if self.continuous {
info!("Running continuous health monitoring (Ctrl+C to stop)");
let mut interval = tokio::time::interval(Duration::from_secs(self.interval));
loop {
tokio::select! {
_ = interval.tick() => {
let report = monitor.generate_performance_report()?;
let formatter = OutputFormatter::new(&self.format);
formatter.print_system_health(&report.system_health)?;
if self.recommendations {
println!("\nRecommendations:");
for rec in &report.recommendations {
println!(" • {rec}");
}
}
}
_ = tokio::signal::ctrl_c() => {
info!("Health monitoring stopped by user");
break;
}
}
}
} else {
let report = monitor.generate_performance_report()?;
let formatter = OutputFormatter::new(&self.format);
formatter.print_system_health(&report.system_health)?;
if self.recommendations {
println!("\nSystem Recommendations:");
for rec in &report.recommendations {
println!(" • {rec}");
}
println!("\nHealth-specific Recommendations:");
for rec in &report.system_health.recommendations {
println!(" • {rec}");
}
}
}
Ok(())
}
}
impl ReportCommand {
pub async fn execute(&self, _config: &Config) -> Result<(), CliError> {
info!("Generating performance report for {} hours", self.period);
let config = MonitoringConfig::default();
let monitor = PerformanceMonitor::new(config);
monitor.start_monitoring().await?;
let report = monitor.generate_performance_report()?;
match self.format.as_str() {
"html" => self.generate_html_report(&report).await?,
"pdf" => self.generate_pdf_report(&report).await?,
"markdown" => self.generate_markdown_report(&report).await?,
"json" => self.generate_json_report(&report).await?,
_ => {
let formatter = OutputFormatter::new(&self.format);
formatter.print_performance_report(&report)?;
}
}
Ok(())
}
async fn generate_html_report(
&self,
report: &crate::tools::performance::PerformanceReport,
) -> Result<(), CliError> {
let html_content = format!(
r#"<!DOCTYPE html>
<html>
<head>
<title>OxiRS Performance Report</title>
<style>
body {{ font-family: Arial, sans-serif; margin: 40px; }}
.header {{ background: #f4f4f4; padding: 20px; border-radius: 5px; }}
.metric {{ margin: 10px 0; }}
.status-healthy {{ color: green; }}
.status-warning {{ color: orange; }}
.status-critical {{ color: red; }}
</style>
</head>
<body>
<div class="header">
<h1>OxiRS Performance Report</h1>
<p>Generated: {}</p>
</div>
<h2>System Health</h2>
<div class="metric">Status: <span class="status-{:?}">{:?}</span></div>
<div class="metric">CPU Usage: {:.1}%</div>
<div class="metric">Memory Usage: {:.1}%</div>
<h2>Performance Metrics</h2>
<div class="metric">Memory Used: {:.2} GB</div>
<div class="metric">Memory Total: {:.2} GB</div>
<div class="metric">Active Sessions: {}</div>
<h2>Recommendations</h2>
<ul>
{}
</ul>
</body>
</html>"#,
chrono::Utc::now().format("%Y-%m-%d %H:%M:%S UTC"),
            format!("{:?}", report.system_health.status).to_lowercase(),
report.system_health.status,
report.system_health.cpu_usage_percentage,
report.system_health.memory_usage_percentage,
report.current_metrics.memory_usage as f64 / 1_000_000_000.0,
report.current_metrics.memory_total as f64 / 1_000_000_000.0,
report.active_profiling_sessions,
report
.recommendations
.iter()
.map(|r| format!("<li>{r}</li>"))
.collect::<Vec<_>>()
.join("\n ")
);
let output_path = self
.output
.as_ref()
.cloned()
.unwrap_or_else(|| PathBuf::from("performance_report.html"));
fs::write(&output_path, html_content).await.map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to write HTML report: {e}"),
))
})?;
info!("HTML report saved to {}", output_path.display());
Ok(())
}
async fn generate_pdf_report(
&self,
report: &crate::tools::performance::PerformanceReport,
) -> Result<(), CliError> {
use printpdf::*;
info!("Generating PDF performance report");
let mut doc = PdfDocument::new("OxiRS Performance Report");
let mut ops = vec![Op::SaveGraphicsState, Op::StartTextSection];
ops.push(Op::SetTextCursor {
pos: Point::new(Mm(20.0), Mm(270.0)),
});
ops.push(Op::SetFont {
font: PdfFontHandle::Builtin(BuiltinFont::HelveticaBold),
size: Pt(24.0),
});
ops.push(Op::SetLineHeight { lh: Pt(24.0) });
ops.push(Op::ShowText {
items: vec![TextItem::Text("OxiRS Performance Report".to_string())],
});
ops.push(Op::AddLineBreak);
ops.push(Op::AddLineBreak);
let timestamp = chrono::Utc::now()
.format("%Y-%m-%d %H:%M:%S UTC")
.to_string();
ops.push(Op::SetFont {
font: PdfFontHandle::Builtin(BuiltinFont::Helvetica),
size: Pt(10.0),
});
ops.push(Op::SetLineHeight { lh: Pt(12.0) });
ops.push(Op::ShowText {
items: vec![TextItem::Text(format!("Generated: {}", timestamp))],
});
ops.push(Op::AddLineBreak);
ops.push(Op::AddLineBreak);
ops.push(Op::SetFont {
font: PdfFontHandle::Builtin(BuiltinFont::HelveticaBold),
size: Pt(16.0),
});
ops.push(Op::SetLineHeight { lh: Pt(18.0) });
ops.push(Op::ShowText {
items: vec![TextItem::Text("System Health".to_string())],
});
ops.push(Op::AddLineBreak);
ops.push(Op::SetFont {
font: PdfFontHandle::Builtin(BuiltinFont::Helvetica),
size: Pt(10.0),
});
ops.push(Op::SetLineHeight { lh: Pt(12.0) });
let health_items = vec![
format!("Status: {:?}", report.system_health.status),
format!(
"CPU Usage: {:.1}%",
report.system_health.cpu_usage_percentage
),
format!(
"Memory Usage: {:.1}%",
report.system_health.memory_usage_percentage
),
format!(
"Disk Issues: {}",
if report.system_health.disk_space_issues {
"Yes"
} else {
"No"
}
),
];
for item in health_items {
ops.push(Op::ShowText {
items: vec![TextItem::Text(item)],
});
ops.push(Op::AddLineBreak);
}
ops.push(Op::AddLineBreak);
ops.push(Op::SetFont {
font: PdfFontHandle::Builtin(BuiltinFont::HelveticaBold),
size: Pt(16.0),
});
ops.push(Op::SetLineHeight { lh: Pt(18.0) });
ops.push(Op::ShowText {
items: vec![TextItem::Text("Performance Metrics".to_string())],
});
ops.push(Op::AddLineBreak);
ops.push(Op::SetFont {
font: PdfFontHandle::Builtin(BuiltinFont::Helvetica),
size: Pt(10.0),
});
ops.push(Op::SetLineHeight { lh: Pt(12.0) });
let metrics = vec![
format!(
"Memory Used: {:.2} GB",
report.current_metrics.memory_usage as f64 / 1_000_000_000.0
),
format!(
"Memory Total: {:.2} GB",
report.current_metrics.memory_total as f64 / 1_000_000_000.0
),
format!("Active Sessions: {}", report.active_profiling_sessions),
format!(
"Performance Counters: {}",
report.performance_counters.len()
),
];
for item in metrics {
ops.push(Op::ShowText {
items: vec![TextItem::Text(item)],
});
ops.push(Op::AddLineBreak);
}
ops.push(Op::AddLineBreak);
if !report.recommendations.is_empty() {
ops.push(Op::SetFont {
font: PdfFontHandle::Builtin(BuiltinFont::HelveticaBold),
size: Pt(16.0),
});
ops.push(Op::SetLineHeight { lh: Pt(18.0) });
ops.push(Op::ShowText {
items: vec![TextItem::Text("Recommendations".to_string())],
});
ops.push(Op::AddLineBreak);
ops.push(Op::SetFont {
font: PdfFontHandle::Builtin(BuiltinFont::Helvetica),
size: Pt(10.0),
});
ops.push(Op::SetLineHeight { lh: Pt(12.0) });
            for rec in report.recommendations.iter().take(20) {
                // Truncate on a char boundary; byte slicing could panic on
                // multi-byte UTF-8.
                let rec_text = if rec.chars().count() > 80 {
                    format!("• {}...", rec.chars().take(77).collect::<String>())
                } else {
                    format!("• {}", rec)
                };
ops.push(Op::ShowText {
items: vec![TextItem::Text(rec_text)],
});
ops.push(Op::AddLineBreak);
}
}
ops.push(Op::EndTextSection);
ops.push(Op::RestoreGraphicsState);
let page = PdfPage::new(Mm(210.0), Mm(297.0), ops);
let output_path = self
.output
.as_ref()
.cloned()
.unwrap_or_else(|| PathBuf::from("performance_report.pdf"));
let pdf_bytes = doc
.with_pages(vec![page])
.save(&PdfSaveOptions::default(), &mut Vec::new());
std::fs::write(&output_path, pdf_bytes).map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to save PDF: {}", e),
))
})?;
info!("PDF report saved to {}", output_path.display());
Ok(())
}
async fn generate_markdown_report(
&self,
report: &crate::tools::performance::PerformanceReport,
) -> Result<(), CliError> {
let markdown_content = format!(
r#"# OxiRS Performance Report
**Generated**: {}
## System Health
- **Status**: {:?}
- **CPU Usage**: {:.1}%
- **Memory Usage**: {:.1}%
## Performance Metrics
- **Memory Used**: {:.2} GB
- **Memory Total**: {:.2} GB
- **Active Profiling Sessions**: {}
## Recommendations
{}
"#,
chrono::Utc::now().format("%Y-%m-%d %H:%M:%S UTC"),
report.system_health.status,
report.system_health.cpu_usage_percentage,
report.system_health.memory_usage_percentage,
report.current_metrics.memory_usage as f64 / 1_000_000_000.0,
report.current_metrics.memory_total as f64 / 1_000_000_000.0,
report.active_profiling_sessions,
report
.recommendations
.iter()
.map(|r| format!("- {r}"))
.collect::<Vec<_>>()
.join("\n")
);
let output_path = self
.output
.as_ref()
.cloned()
.unwrap_or_else(|| PathBuf::from("performance_report.md"));
fs::write(&output_path, markdown_content)
.await
.map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to write Markdown report: {e}"),
))
})?;
info!("Markdown report saved to {}", output_path.display());
Ok(())
}
async fn generate_json_report(
&self,
report: &crate::tools::performance::PerformanceReport,
) -> Result<(), CliError> {
let json_content = serde_json::to_string_pretty(report)
.map_err(|e| CliError::serialization_error(e.to_string()))?;
let output_path = self
.output
.as_ref()
.cloned()
.unwrap_or_else(|| PathBuf::from("performance_report.json"));
fs::write(&output_path, json_content).await.map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to write JSON report: {e}"),
))
})?;
info!("JSON report saved to {}", output_path.display());
Ok(())
}
}
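/// Render a flame graph SVG from a profiling result.
///
/// `inferno` consumes "folded stack" lines of the form
/// `frame;sub_frame <weight>`. Real call stacks are not captured here, so
/// each checkpoint, the memory delta, and the average CPU usage are encoded
/// as synthetic sub-frames weighted in milliseconds.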
async fn generate_flamegraph(
    result: &ProfilingResult,
    output_path: &Path,
) -> Result<(), CliError> {
use inferno::flamegraph;
use std::io::BufWriter;
use std::str::FromStr;
let mut lines = Vec::new();
let total_ms = result.total_duration.as_millis();
lines.push(format!("{} {}", result.operation_name, total_ms));
if !result.checkpoints.is_empty() {
let mut prev_time = std::time::Duration::from_secs(0);
for (idx, checkpoint) in result.checkpoints.iter().enumerate() {
            // saturating_sub guards against out-of-order checkpoint timestamps.
            let delta_ms = checkpoint
                .duration_from_start
                .saturating_sub(prev_time)
                .as_millis();
if delta_ms > 0 {
let stack_frame = format!(
"{};checkpoint_{:03}_{}",
result.operation_name,
idx + 1,
checkpoint.name.replace(' ', "_")
);
lines.push(format!("{} {}", stack_frame, delta_ms));
}
prev_time = checkpoint.duration_from_start;
}
}
let memory_delta =
result.end_metrics.memory_usage as i64 - result.start_metrics.memory_usage as i64;
if memory_delta.abs() > 1_000_000 {
let mem_frame = format!("{};memory_delta", result.operation_name);
let mem_weight = (memory_delta.abs() as f64 / 1_000_000.0 * total_ms as f64) as u128;
if mem_weight > 0 {
lines.push(format!("{} {}", mem_frame, mem_weight.min(total_ms)));
}
}
let avg_cpu = result.performance_summary.average_cpu_usage;
if avg_cpu > 1.0 {
let cpu_frame = format!("{};cpu_usage", result.operation_name);
let cpu_weight = (avg_cpu * total_ms as f32 / 100.0) as u128;
if cpu_weight > 0 {
lines.push(format!("{} {}", cpu_frame, cpu_weight));
}
}
if lines.len() == 1 {
lines.push(format!("{};execution {}", result.operation_name, total_ms));
}
lines.sort();
let input_data = lines.join("\n");
let input_bytes = input_data.as_bytes();
let output_file = std::fs::File::create(output_path).map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to create flame graph file: {}", e),
))
})?;
let writer = BufWriter::new(output_file);
let mut options = flamegraph::Options::default();
options.title = format!("Profile: {}", result.operation_name);
options.count_name = "ms".to_string();
options.colors = flamegraph::color::Palette::from_str("hot")
.unwrap_or(flamegraph::color::Palette::default());
options.min_width = 0.1;
flamegraph::from_reader(&mut options, input_bytes, writer).map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::Other,
format!("Failed to generate flame graph: {}", e),
))
})?;
Ok(())
}
impl OptimizerCommand {
pub async fn execute(&self, _config: &Config) -> Result<(), CliError> {
info!("Analyzing dataset performance: {}", self.dataset.display());
super::performance_optimizer::optimize_dataset_cmd(
self.dataset.to_string_lossy().to_string(),
)
.await
.map_err(|e| {
CliError::new(CliErrorKind::Other(format!(
"Dataset optimization failed: {}",
e
)))
})?;
if self.detailed {
info!("Detailed analysis complete. Check output for recommendations.");
}
Ok(())
}
}
impl AdvisorCommand {
pub async fn execute(&self, _config: &Config) -> Result<(), CliError> {
info!("Analyzing SPARQL query for best practices");
let query = if self.file {
tokio::fs::read_to_string(&self.query).await.map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::NotFound,
format!("Failed to read query file: {}", e),
))
})?
} else {
self.query.clone()
};
super::query_advisor::analyze_query_cmd(query, self.verbose)
.await
.map_err(|e| {
CliError::new(CliErrorKind::Other(format!("Query analysis failed: {}", e)))
})?;
if self.verbose {
info!("Detailed query analysis complete with metrics.");
}
Ok(())
}
}
impl PredictorCommand {
pub async fn execute(&self, _config: &Config) -> Result<(), CliError> {
info!("Predicting SPARQL query performance with ML");
let query = if self.file {
tokio::fs::read_to_string(&self.query).await.map_err(|e| {
CliError::io_error(std::io::Error::new(
std::io::ErrorKind::NotFound,
format!("Failed to read query file: {}", e),
))
})?
} else {
self.query.clone()
};
let train_data = self.train.as_ref().map(|p| p.to_string_lossy().to_string());
super::query_predictor::predict_query_performance_cmd(query, train_data)
.await
.map_err(|e| {
CliError::new(CliErrorKind::Other(format!(
"Performance prediction failed: {}",
e
)))
})?;
if self.detailed {
info!("Detailed performance prediction complete with feature analysis.");
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
    #[test]
    fn test_monitor_command_validation() {
let cmd = MonitorCommand {
duration: Some(1),
interval: 100,
format: "json".to_string(),
save: None,
continuous: false,
metrics: None,
cpu_threshold: Some(80.0),
memory_threshold: Some(90.0),
};
assert_eq!(cmd.interval, 100);
assert_eq!(cmd.format, "json");
}
    #[test]
    fn test_profile_command_creation() {
let cmd = ProfileCommand {
operation: "test_op".to_string(),
command: vec!["echo".to_string(), "hello".to_string()],
format: "table".to_string(),
save: None,
detailed: true,
metrics: None,
memory: true,
cpu: true,
io: false,
flamegraph: true,
flamegraph_output: Some(PathBuf::from("/tmp/test_profile.svg")),
};
assert_eq!(cmd.operation, "test_op");
assert!(cmd.detailed);
assert!(cmd.memory);
assert!(cmd.flamegraph);
}
#[test]
fn test_optimizer_command_creation() {
let cmd = OptimizerCommand {
dataset: PathBuf::from("/tmp/test_dataset"),
format: "json".to_string(),
save: Some(PathBuf::from("/tmp/optimization_report.json")),
detailed: true,
};
assert_eq!(cmd.dataset, PathBuf::from("/tmp/test_dataset"));
assert_eq!(cmd.format, "json");
assert!(cmd.detailed);
assert!(cmd.save.is_some());
}
#[test]
fn test_advisor_command_creation() {
let cmd = AdvisorCommand {
query: "SELECT * WHERE { ?s ?p ?o }".to_string(),
file: false,
format: "text".to_string(),
verbose: true,
save: None,
};
assert_eq!(cmd.query, "SELECT * WHERE { ?s ?p ?o }");
assert!(!cmd.file);
assert!(cmd.verbose);
assert!(cmd.save.is_none());
}
#[test]
fn test_advisor_command_file_mode() {
let cmd = AdvisorCommand {
query: "/tmp/query.sparql".to_string(),
file: true,
format: "json".to_string(),
verbose: false,
save: Some(PathBuf::from("/tmp/analysis.json")),
};
assert_eq!(cmd.query, "/tmp/query.sparql");
assert!(cmd.file);
assert!(!cmd.verbose);
assert_eq!(cmd.save, Some(PathBuf::from("/tmp/analysis.json")));
}
#[test]
fn test_predictor_command_creation() {
let cmd = PredictorCommand {
query: "SELECT ?s WHERE { ?s ?p ?o } LIMIT 100".to_string(),
file: false,
train: None,
format: "text".to_string(),
save: None,
detailed: false,
};
assert_eq!(cmd.query, "SELECT ?s WHERE { ?s ?p ?o } LIMIT 100");
assert!(!cmd.file);
assert!(cmd.train.is_none());
assert!(!cmd.detailed);
}
#[test]
fn test_predictor_command_with_training_data() {
let cmd = PredictorCommand {
query: "SELECT * WHERE { ?s ?p ?o }".to_string(),
file: false,
train: Some(PathBuf::from("/tmp/training_data.json")),
format: "json".to_string(),
save: Some(PathBuf::from("/tmp/prediction.json")),
detailed: true,
};
assert!(cmd.train.is_some());
assert_eq!(cmd.train, Some(PathBuf::from("/tmp/training_data.json")));
assert!(cmd.detailed);
assert_eq!(cmd.format, "json");
}
#[test]
fn test_predictor_command_file_mode() {
let cmd = PredictorCommand {
query: "/tmp/complex_query.sparql".to_string(),
file: true,
train: Some(PathBuf::from("/tmp/historical_data.json")),
format: "text".to_string(),
save: Some(PathBuf::from("/tmp/perf_prediction.txt")),
detailed: true,
};
assert_eq!(cmd.query, "/tmp/complex_query.sparql");
assert!(cmd.file);
assert!(cmd.train.is_some());
assert!(cmd.detailed);
}
}