use std::path::PathBuf;
use std::time::Instant;

use tempfile::TempDir;
use thread_flow::incremental::analyzer::IncrementalAnalyzer;
use thread_flow::incremental::storage::InMemoryStorage;
use thread_flow::incremental::types::{DependencyEdge, DependencyType};
use tokio::fs;
/// Sets up logging and metrics for the example run.
///
/// Installs an `env_logger` backend (environment-driven filtering,
/// microsecond timestamps) and a Prometheus metrics recorder, then
/// emits a startup event.
///
/// NOTE(review): events emitted through `tracing` macros only reach
/// `env_logger` if a subscriber or the tracing-log bridge is wired up
/// elsewhere — confirm that wiring.
fn init_observability() {
    let mut log_builder = env_logger::Builder::from_default_env();
    log_builder.format_timestamp_micros();
    log_builder.init();

    let recorder = metrics_exporter_prometheus::PrometheusBuilder::new();
    recorder
        .install()
        .expect("failed to install metrics recorder");

    tracing::info!("observability initialized");
}
/// Writes `content` to a file called `name` inside `dir` and returns
/// the file's full path.
///
/// # Panics
///
/// Panics if the file cannot be written. In this example a failed
/// fixture makes the rest of the run meaningless, so aborting with a
/// descriptive message (rather than a bare `unwrap`) is the right
/// behavior.
async fn create_test_file(dir: &TempDir, name: &str, content: &str) -> PathBuf {
    let path = dir.path().join(name);
    fs::write(&path, content)
        .await
        .expect("failed to write test fixture file");
    path
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
init_observability();
tracing::info!("=== Observability Example ===");
let temp_dir = tempfile::tempdir()?;
let file1 = create_test_file(&temp_dir, "main.rs", "fn main() {}").await;
let file2 = create_test_file(&temp_dir, "utils.rs", "pub fn helper() {}").await;
let file3 = create_test_file(&temp_dir, "lib.rs", "pub mod utils;").await;
let storage = Box::new(InMemoryStorage::new());
let mut analyzer = IncrementalAnalyzer::new(storage);
tracing::info!("=== Phase 1: Initial Analysis (Cold Cache) ===");
let start = Instant::now();
let result = analyzer
.analyze_changes(&[file1.clone(), file2.clone(), file3.clone()])
.await?;
tracing::info!(
"initial analysis: {} changed files, cache hit rate: {:.1}%, duration: {:?}",
result.changed_files.len(),
result.cache_hit_rate * 100.0,
start.elapsed()
);
tracing::info!("=== Phase 2: Unchanged Analysis (Warm Cache) ===");
let start = Instant::now();
let result = analyzer
.analyze_changes(&[file1.clone(), file2.clone(), file3.clone()])
.await?;
tracing::info!(
"warm cache analysis: {} changed files, cache hit rate: {:.1}%, duration: {:?}",
result.changed_files.len(),
result.cache_hit_rate * 100.0,
start.elapsed()
);
tracing::info!("=== Phase 3: Partial Change (Mixed Cache) ===");
fs::write(&file2, "pub fn helper() { println!(\"updated\"); }")
.await
.unwrap();
let start = Instant::now();
let result = analyzer
.analyze_changes(&[file1.clone(), file2.clone(), file3.clone()])
.await?;
tracing::info!(
"mixed cache analysis: {} changed files, cache hit rate: {:.1}%, duration: {:?}",
result.changed_files.len(),
result.cache_hit_rate * 100.0,
start.elapsed()
);
tracing::info!("=== Phase 4: Dependency Graph Operations ===");
analyzer.graph_mut().add_edge(DependencyEdge::new(
file3.clone(),
file2.clone(),
thread_flow::incremental::types::DependencyType::Import,
));
analyzer.graph_mut().add_edge(DependencyEdge::new(
file1.clone(),
file3.clone(),
thread_flow::incremental::types::DependencyType::Import,
));
tracing::info!(
"graph: {} nodes, {} edges",
analyzer.graph().node_count(),
analyzer.graph().edge_count()
);
let start = Instant::now();
let affected = analyzer
.invalidate_dependents(std::slice::from_ref(&file2))
.await?;
tracing::info!(
"invalidation: {} affected files, duration: {:?}",
affected.len(),
start.elapsed()
);
tracing::info!("=== Metrics Summary ===");
tracing::info!("All operations complete. Metrics recorded:");
tracing::info!(" - cache_hits_total: counter");
tracing::info!(" - cache_misses_total: counter");
tracing::info!(" - cache_hit_rate: gauge (target >90%)");
tracing::info!(" - analysis_overhead_ms: histogram (target <10ms)");
tracing::info!(" - invalidation_time_ms: histogram (target <50ms)");
tracing::info!(" - graph_nodes: gauge");
tracing::info!(" - graph_edges: gauge");
tracing::info!(" - storage_reads_total: counter");
tracing::info!(" - storage_writes_total: counter");
tracing::info!(" - storage_read_latency_ms: histogram");
tracing::info!(" - storage_write_latency_ms: histogram");
Ok(())
}