//! reasonkit-core 0.1.8
//!
//! The Reasoning Engine — Auditable Reasoning for Production AI | Rust-Native | Turn Prompts into Protocols
//!
//! # RAG Performance Regression Detection Example
//!
//! This example demonstrates how to use the RAG performance monitoring system
//! to detect performance regressions in core RAG operations.

use reasonkit::rag::performance::{PerformanceConfig, RagPerformanceMonitor};
use reasonkit::rag::RagEngine;
use std::time::Duration;

/// Demo entry point: builds an in-memory RAG engine, runs one benchmark
/// pass, checks the results against history for statistically significant
/// regressions, and prints a short summary of the monitor's history.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    println!("๐Ÿงช RAG Performance Regression Detection Demo");
    println!("=============================================");

    // An in-memory engine is enough for benchmarking the demo.
    let engine = RagEngine::in_memory()
        .map_err(|e| anyhow::anyhow!("Failed to create RAG engine: {}", e))?;

    // Monitoring configuration, deliberately scaled down for a quick run.
    let perf_config = PerformanceConfig {
        alert_threshold: 0.05, // flag degradations beyond 5%
        sample_size: 10,       // small sample keeps the demo fast
        confidence_level: 0.95,
        history_window: 50,
        benchmark_queries: vec![
            "What is machine learning?".to_string(),
            "Explain neural networks".to_string(),
            "How does backpropagation work?".to_string(),
        ],
        enable_memory_monitoring: false, // keep the demo lightweight
        monitoring_interval: Duration::from_secs(60),
    };

    let monitor = RagPerformanceMonitor::new(engine, perf_config);

    // Collect one fresh set of benchmark measurements.
    println!("\n๐Ÿ“Š Running performance benchmark...");
    let snapshot = monitor.run_benchmark().await?;

    // Report the headline numbers from this run.
    println!("\n๐Ÿ“ˆ Performance Metrics:");
    println!("======================");
    println!(
        "Retrieval Time: {:.2} ms",
        snapshot.retrieval.retrieval_time_ms
    );
    println!(
        "Generation Time: {:.2} ms",
        snapshot.generation.generation_time_ms
    );
    println!(
        "Total Query Time: {:.2} ms",
        snapshot.end_to_end.total_time_ms
    );
    println!("Tokens/Second: {:.1}", snapshot.generation.tokens_per_second);
    println!(
        "Queries/Second: {:.2}",
        snapshot.end_to_end.queries_per_second
    );
    println!(
        "Success Rate: {:.1}%",
        snapshot.end_to_end.success_rate * 100.0
    );

    // Compare against recorded history and surface any detected slowdowns.
    println!("\n๐Ÿ” Checking for regressions...");
    let detected = monitor.detect_regressions().await?;

    if !detected.is_empty() {
        println!("๐Ÿšจ Performance Regressions Found:");
        for hit in &detected {
            println!(
                "โ€ข {} degraded by {:.1}% (p-value: {:.4})",
                hit.metric,
                hit.change_percent * 100.0,
                hit.p_value
            );
        }
    } else {
        println!("โœ… No performance regressions detected.");
    }

    // Summarize what the monitor has accumulated so far.
    println!("\n๐Ÿ“‹ Performance History Summary:");
    let summary = monitor.get_history_summary().await?;
    if let Some(count) = summary.get("total_measurements") {
        println!("Total measurements: {}", count);
    }

    println!("\nโœจ Demo completed successfully!");

    Ok(())
}