use anyhow::Result;
use chrono::{DateTime, Duration, Utc};
#[cfg(feature = "excel-export")]
use rust_xlsxwriter::{Format, Workbook};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::info;
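/// Central collector for dashboard metrics. Each metric family (queries, user
/// activity, system health) sits behind its own `RwLock`, so recording a data
/// point in one family never blocks readers of the others.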
pub struct DashboardAnalytics {
query_metrics: Arc<RwLock<QueryMetrics>>,
user_activity: Arc<RwLock<UserActivityTracker>>,
system_health: Arc<RwLock<SystemHealthMetrics>>,
}
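/// Tunables for the analytics dashboard. At the moment `DashboardAnalytics::new`
/// only logs `retention_days`; the remaining fields are carried in the config but
/// are not yet consumed by the collector itself (history trimming uses fixed caps).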
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DashboardConfig {
pub retention_days: u32,
pub enable_realtime: bool,
pub aggregation_interval_secs: u64,
pub max_data_points: usize,
}
impl Default for DashboardConfig {
fn default() -> Self {
Self {
retention_days: 30,
enable_realtime: true,
            aggregation_interval_secs: 300,
            max_data_points: 100,
}
}
}
#[derive(Debug, Clone, Default)]
pub struct QueryMetrics {
pub total_queries: u64,
pub successful_queries: u64,
pub failed_queries: u64,
pub avg_response_time_ms: f64,
pub p95_response_time_ms: f64,
pub p99_response_time_ms: f64,
pub query_history: Vec<QueryRecord>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueryRecord {
pub query_id: String,
pub query_type: QueryType,
pub execution_time_ms: u64,
pub result_count: usize,
pub success: bool,
pub timestamp: DateTime<Utc>,
pub error: Option<String>,
}
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum QueryType {
NaturalLanguage,
Sparql,
VectorSearch,
Hybrid,
}
#[derive(Debug, Clone, Default)]
pub struct UserActivityTracker {
pub active_users_24h: u64,
pub total_sessions: u64,
pub avg_session_duration_secs: f64,
pub activity_timeline: Vec<ActivityDataPoint>,
pub top_users: Vec<UserActivity>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ActivityDataPoint {
pub timestamp: DateTime<Utc>,
pub active_users: u64,
pub queries_per_minute: f64,
pub avg_response_time_ms: f64,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserActivity {
pub user_id: String,
pub query_count: u64,
pub session_count: u64,
pub total_time_secs: u64,
pub last_active: DateTime<Utc>,
}
#[derive(Debug, Clone, Default)]
pub struct SystemHealthMetrics {
pub cpu_usage_percent: f64,
pub memory_usage_mb: f64,
pub active_connections: u64,
pub cache_hit_rate: f64,
pub error_rate: f64,
pub health_timeline: Vec<HealthDataPoint>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HealthDataPoint {
pub timestamp: DateTime<Utc>,
pub cpu_percent: f64,
pub memory_mb: f64,
pub active_connections: u64,
pub requests_per_second: f64,
}
impl DashboardAnalytics {
pub fn new(config: DashboardConfig) -> Self {
info!(
"Initializing dashboard analytics with retention: {} days",
config.retention_days
);
Self {
query_metrics: Arc::new(RwLock::new(QueryMetrics::default())),
user_activity: Arc::new(RwLock::new(UserActivityTracker::default())),
system_health: Arc::new(RwLock::new(SystemHealthMetrics::default())),
}
}
pub async fn get_overview(&self) -> DashboardOverview {
let query_metrics = self.query_metrics.read().await;
let user_activity = self.user_activity.read().await;
let system_health = self.system_health.read().await;
DashboardOverview {
total_queries: query_metrics.total_queries,
successful_queries: query_metrics.successful_queries,
failed_queries: query_metrics.failed_queries,
avg_response_time_ms: query_metrics.avg_response_time_ms,
active_users_24h: user_activity.active_users_24h,
total_sessions: user_activity.total_sessions,
cpu_usage_percent: system_health.cpu_usage_percent,
memory_usage_mb: system_health.memory_usage_mb,
cache_hit_rate: system_health.cache_hit_rate,
error_rate: system_health.error_rate,
timestamp: Utc::now(),
}
}
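    /// Recomputes query aggregates (counts, average, p95/p99, type distribution)
    /// over the slice of history that falls inside `time_range`.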
pub async fn get_query_analytics(&self, time_range: TimeRange) -> QueryAnalytics {
let metrics = self.query_metrics.read().await;
let filtered_queries: Vec<_> = metrics
.query_history
.iter()
.filter(|q| time_range.contains(q.timestamp))
.cloned()
.collect();
let total = filtered_queries.len() as u64;
let successful = filtered_queries.iter().filter(|q| q.success).count() as u64;
let failed = total - successful;
let execution_times: Vec<f64> = filtered_queries
.iter()
.map(|q| q.execution_time_ms as f64)
.collect();
let avg_time = if !execution_times.is_empty() {
execution_times.iter().sum::<f64>() / execution_times.len() as f64
} else {
0.0
};
let mut type_distribution = HashMap::new();
for query in &filtered_queries {
*type_distribution.entry(query.query_type).or_insert(0) += 1;
}
QueryAnalytics {
total_queries: total,
successful_queries: successful,
failed_queries: failed,
avg_response_time_ms: avg_time,
p95_response_time_ms: Self::calculate_percentile(&execution_times, 0.95),
p99_response_time_ms: Self::calculate_percentile(&execution_times, 0.99),
query_type_distribution: type_distribution,
time_range,
}
}
pub async fn get_user_analytics(&self, time_range: TimeRange) -> UserAnalytics {
let activity = self.user_activity.read().await;
let filtered_timeline: Vec<_> = activity
.activity_timeline
.iter()
.filter(|a| time_range.contains(a.timestamp))
.cloned()
.collect();
UserAnalytics {
active_users: activity.active_users_24h,
total_sessions: activity.total_sessions,
avg_session_duration_secs: activity.avg_session_duration_secs,
activity_timeline: filtered_timeline,
top_users: activity.top_users.clone(),
time_range,
}
}
pub async fn get_health_analytics(&self, time_range: TimeRange) -> HealthAnalytics {
let health = self.system_health.read().await;
let filtered_timeline: Vec<_> = health
.health_timeline
.iter()
.filter(|h| time_range.contains(h.timestamp))
.cloned()
.collect();
HealthAnalytics {
current_cpu_percent: health.cpu_usage_percent,
current_memory_mb: health.memory_usage_mb,
active_connections: health.active_connections,
cache_hit_rate: health.cache_hit_rate,
error_rate: health.error_rate,
health_timeline: filtered_timeline,
time_range,
}
}
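    /// Records a finished query: updates the success/failure counters, folds the
    /// execution time into the running average, and appends to the in-memory
    /// history (capped at 10,000 records, trimmed 1,000 at a time).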
pub async fn record_query(&self, record: QueryRecord) {
let mut metrics = self.query_metrics.write().await;
metrics.total_queries += 1;
if record.success {
metrics.successful_queries += 1;
} else {
metrics.failed_queries += 1;
}
let total_time = metrics.avg_response_time_ms * (metrics.total_queries - 1) as f64
+ record.execution_time_ms as f64;
metrics.avg_response_time_ms = total_time / metrics.total_queries as f64;
metrics.query_history.push(record);
if metrics.query_history.len() > 10_000 {
metrics.query_history.drain(0..1_000);
}
}
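    /// Adds `query_count` to the per-user totals, creating an entry on first
    /// sight, then re-sorts and keeps only the 100 most active users.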
pub async fn update_user_activity(&self, user_id: String, query_count: u64) {
let mut activity = self.user_activity.write().await;
if let Some(user) = activity.top_users.iter_mut().find(|u| u.user_id == user_id) {
user.query_count += query_count;
user.last_active = Utc::now();
} else {
activity.top_users.push(UserActivity {
user_id,
query_count,
session_count: 1,
total_time_secs: 0,
last_active: Utc::now(),
});
}
activity
.top_users
.sort_by_key(|item| std::cmp::Reverse(item.query_count));
activity.top_users.truncate(100);
}
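    /// Takes a health sample and appends it to the timeline. The timeline is
    /// capped at 288 points, which corresponds to 24 hours of samples at the
    /// default 5-minute aggregation interval.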
pub async fn update_health(&self, cpu_percent: f64, memory_mb: f64, connections: u64) {
let mut health = self.system_health.write().await;
health.cpu_usage_percent = cpu_percent;
health.memory_usage_mb = memory_mb;
health.active_connections = connections;
let requests_per_second = self.calculate_requests_per_second().await;
health.health_timeline.push(HealthDataPoint {
timestamp: Utc::now(),
cpu_percent,
memory_mb,
active_connections: connections,
requests_per_second,
});
if health.health_timeline.len() > 288 {
health.health_timeline.drain(0..100);
}
}
async fn calculate_requests_per_second(&self) -> f64 {
let metrics = self.query_metrics.read().await;
let now = Utc::now();
let one_minute_ago = now - Duration::seconds(60);
let recent_queries = metrics
.query_history
.iter()
.filter(|q| q.timestamp >= one_minute_ago)
.count();
recent_queries as f64 / 60.0
}
    /// Nearest-rank percentile over an unsorted sample; returns 0.0 for an empty
    /// slice.
    fn calculate_percentile(values: &[f64], percentile: f64) -> f64 {
        if values.is_empty() {
            return 0.0;
        }
        let mut sorted = values.to_vec();
        sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
        // Nearest-rank method: ceil(p * n) is the 1-based rank; clamping keeps
        // percentile >= 1.0 on the maximum sample instead of indexing past the end.
        let rank = (percentile * sorted.len() as f64).ceil() as usize;
        sorted[rank.clamp(1, sorted.len()) - 1]
    }
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DashboardOverview {
pub total_queries: u64,
pub successful_queries: u64,
pub failed_queries: u64,
pub avg_response_time_ms: f64,
pub active_users_24h: u64,
pub total_sessions: u64,
pub cpu_usage_percent: f64,
pub memory_usage_mb: f64,
pub cache_hit_rate: f64,
pub error_rate: f64,
pub timestamp: DateTime<Utc>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueryAnalytics {
pub total_queries: u64,
pub successful_queries: u64,
pub failed_queries: u64,
pub avg_response_time_ms: f64,
pub p95_response_time_ms: f64,
pub p99_response_time_ms: f64,
pub query_type_distribution: HashMap<QueryType, u64>,
pub time_range: TimeRange,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserAnalytics {
pub active_users: u64,
pub total_sessions: u64,
pub avg_session_duration_secs: f64,
pub activity_timeline: Vec<ActivityDataPoint>,
pub top_users: Vec<UserActivity>,
pub time_range: TimeRange,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HealthAnalytics {
pub current_cpu_percent: f64,
pub current_memory_mb: f64,
pub active_connections: u64,
pub cache_hit_rate: f64,
pub error_rate: f64,
pub health_timeline: Vec<HealthDataPoint>,
pub time_range: TimeRange,
}
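/// Inclusive `[start, end]` window used to filter query, activity, and health
/// records.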
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct TimeRange {
pub start: DateTime<Utc>,
pub end: DateTime<Utc>,
}
impl TimeRange {
pub fn last_hours(hours: i64) -> Self {
let end = Utc::now();
let start = end - Duration::hours(hours);
Self { start, end }
}
pub fn last_days(days: i64) -> Self {
let end = Utc::now();
let start = end - Duration::days(days);
Self { start, end }
}
pub fn contains(&self, timestamp: DateTime<Utc>) -> bool {
timestamp >= self.start && timestamp <= self.end
}
}
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ExportFormat {
Json,
Csv,
Excel,
}
impl DashboardAnalytics {
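    /// Serializes the analytics for `time_range` into the requested format.
    /// Excel output is only available when the crate is built with the
    /// `excel-export` feature; otherwise the call returns an error.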
pub async fn export_data(
&self,
format: ExportFormat,
time_range: TimeRange,
) -> Result<Vec<u8>> {
match format {
ExportFormat::Json => self.export_json(time_range).await,
ExportFormat::Csv => self.export_csv(time_range).await,
ExportFormat::Excel => {
#[cfg(feature = "excel-export")]
{
self.export_excel(time_range).await
}
#[cfg(not(feature = "excel-export"))]
{
anyhow::bail!("Excel export requires the 'excel-export' feature to be enabled")
}
}
}
}
async fn export_json(&self, time_range: TimeRange) -> Result<Vec<u8>> {
let overview = self.get_overview().await;
let query_analytics = self.get_query_analytics(time_range).await;
let user_analytics = self.get_user_analytics(time_range).await;
let health_analytics = self.get_health_analytics(time_range).await;
let export_data = serde_json::json!({
"overview": overview,
"query_analytics": query_analytics,
"user_analytics": user_analytics,
"health_analytics": health_analytics,
});
Ok(serde_json::to_vec_pretty(&export_data)?)
}
async fn export_csv(&self, time_range: TimeRange) -> Result<Vec<u8>> {
let query_analytics = self.get_query_analytics(time_range).await;
let user_analytics = self.get_user_analytics(time_range).await;
let health_analytics = self.get_health_analytics(time_range).await;
let mut csv_output = String::new();
csv_output.push_str("=== QUERY ANALYTICS ===\n");
csv_output.push_str("Metric,Value\n");
csv_output.push_str(&format!(
"Total Queries,{}\n",
query_analytics.total_queries
));
csv_output.push_str(&format!(
"Successful Queries,{}\n",
query_analytics.successful_queries
));
csv_output.push_str(&format!(
"Failed Queries,{}\n",
query_analytics.failed_queries
));
csv_output.push_str(&format!(
"Average Response Time (ms),{:.2}\n",
query_analytics.avg_response_time_ms
));
csv_output.push_str(&format!(
"P95 Response Time (ms),{:.2}\n",
query_analytics.p95_response_time_ms
));
csv_output.push_str(&format!(
"P99 Response Time (ms),{:.2}\n",
query_analytics.p99_response_time_ms
));
csv_output.push('\n');
csv_output.push_str("=== QUERY TYPE DISTRIBUTION ===\n");
csv_output.push_str("Query Type,Count\n");
for (query_type, count) in &query_analytics.query_type_distribution {
csv_output.push_str(&format!("{:?},{}\n", query_type, count));
}
csv_output.push('\n');
csv_output.push_str("=== USER ANALYTICS ===\n");
csv_output.push_str("Metric,Value\n");
csv_output.push_str(&format!("Active Users,{}\n", user_analytics.active_users));
csv_output.push_str(&format!(
"Total Sessions,{}\n",
user_analytics.total_sessions
));
csv_output.push_str(&format!(
"Avg Session Duration (secs),{:.2}\n",
user_analytics.avg_session_duration_secs
));
csv_output.push('\n');
csv_output.push_str("=== TOP USERS ===\n");
csv_output.push_str("User ID,Query Count,Session Count,Total Time (secs),Last Active\n");
for user in &user_analytics.top_users {
csv_output.push_str(&format!(
"{},{},{},{},{}\n",
user.user_id,
user.query_count,
user.session_count,
user.total_time_secs,
user.last_active.to_rfc3339()
));
}
csv_output.push('\n');
csv_output.push_str("=== HEALTH ANALYTICS ===\n");
csv_output.push_str("Metric,Value\n");
csv_output.push_str(&format!(
"Current CPU (%),{:.2}\n",
health_analytics.current_cpu_percent
));
csv_output.push_str(&format!(
"Current Memory (MB),{:.2}\n",
health_analytics.current_memory_mb
));
csv_output.push_str(&format!(
"Active Connections,{}\n",
health_analytics.active_connections
));
csv_output.push_str(&format!(
"Cache Hit Rate,{:.2}\n",
health_analytics.cache_hit_rate
));
csv_output.push_str(&format!("Error Rate,{:.2}\n", health_analytics.error_rate));
csv_output.push('\n');
csv_output.push_str("=== HEALTH TIMELINE ===\n");
csv_output.push_str("Timestamp,CPU (%),Memory (MB),Active Connections,Requests/Second\n");
for datapoint in &health_analytics.health_timeline {
csv_output.push_str(&format!(
"{},{:.2},{:.2},{},{:.2}\n",
datapoint.timestamp.to_rfc3339(),
datapoint.cpu_percent,
datapoint.memory_mb,
datapoint.active_connections,
datapoint.requests_per_second
));
}
csv_output.push('\n');
csv_output.push_str("=== ACTIVITY TIMELINE ===\n");
csv_output.push_str("Timestamp,Active Users,Queries/Min,Avg Response Time (ms)\n");
for datapoint in &user_analytics.activity_timeline {
csv_output.push_str(&format!(
"{},{},{:.2},{:.2}\n",
datapoint.timestamp.to_rfc3339(),
datapoint.active_users,
datapoint.queries_per_minute,
datapoint.avg_response_time_ms
));
}
Ok(csv_output.into_bytes())
}
#[cfg(feature = "excel-export")]
async fn export_excel(&self, time_range: TimeRange) -> Result<Vec<u8>> {
let query_analytics = self.get_query_analytics(time_range).await;
let user_analytics = self.get_user_analytics(time_range).await;
let health_analytics = self.get_health_analytics(time_range).await;
let mut workbook = Workbook::new();
let header_format = Format::new().set_bold();
let worksheet = workbook.add_worksheet();
worksheet.set_name("Query Analytics")?;
worksheet.write_string_with_format(0, 0, "Metric", &header_format)?;
worksheet.write_string_with_format(0, 1, "Value", &header_format)?;
let mut row = 1;
worksheet.write_string(row, 0, "Total Queries")?;
worksheet.write_number(row, 1, query_analytics.total_queries as f64)?;
row += 1;
worksheet.write_string(row, 0, "Successful Queries")?;
worksheet.write_number(row, 1, query_analytics.successful_queries as f64)?;
row += 1;
worksheet.write_string(row, 0, "Failed Queries")?;
worksheet.write_number(row, 1, query_analytics.failed_queries as f64)?;
row += 1;
worksheet.write_string(row, 0, "Avg Response Time (ms)")?;
worksheet.write_number(row, 1, query_analytics.avg_response_time_ms)?;
row += 1;
worksheet.write_string(row, 0, "P95 Response Time (ms)")?;
worksheet.write_number(row, 1, query_analytics.p95_response_time_ms)?;
row += 1;
worksheet.write_string(row, 0, "P99 Response Time (ms)")?;
worksheet.write_number(row, 1, query_analytics.p99_response_time_ms)?;
let worksheet = workbook.add_worksheet();
worksheet.set_name("Query Types")?;
worksheet.write_string_with_format(0, 0, "Query Type", &header_format)?;
worksheet.write_string_with_format(0, 1, "Count", &header_format)?;
for (row, (query_type, count)) in (1..).zip(query_analytics.query_type_distribution.iter())
{
worksheet.write_string(row, 0, format!("{:?}", query_type))?;
worksheet.write_number(row, 1, *count as f64)?;
}
let worksheet = workbook.add_worksheet();
worksheet.set_name("User Analytics")?;
worksheet.write_string_with_format(0, 0, "Metric", &header_format)?;
worksheet.write_string_with_format(0, 1, "Value", &header_format)?;
let mut row = 1;
worksheet.write_string(row, 0, "Active Users")?;
worksheet.write_number(row, 1, user_analytics.active_users as f64)?;
row += 1;
worksheet.write_string(row, 0, "Total Sessions")?;
worksheet.write_number(row, 1, user_analytics.total_sessions as f64)?;
row += 1;
worksheet.write_string(row, 0, "Avg Session Duration (secs)")?;
worksheet.write_number(row, 1, user_analytics.avg_session_duration_secs)?;
let worksheet = workbook.add_worksheet();
worksheet.set_name("Top Users")?;
worksheet.write_string_with_format(0, 0, "User ID", &header_format)?;
worksheet.write_string_with_format(0, 1, "Query Count", &header_format)?;
worksheet.write_string_with_format(0, 2, "Session Count", &header_format)?;
worksheet.write_string_with_format(0, 3, "Total Time (secs)", &header_format)?;
worksheet.write_string_with_format(0, 4, "Last Active", &header_format)?;
for (row, user) in (1..).zip(user_analytics.top_users.iter()) {
worksheet.write_string(row, 0, &user.user_id)?;
worksheet.write_number(row, 1, user.query_count as f64)?;
worksheet.write_number(row, 2, user.session_count as f64)?;
worksheet.write_number(row, 3, user.total_time_secs as f64)?;
worksheet.write_string(row, 4, user.last_active.to_rfc3339())?;
}
let worksheet = workbook.add_worksheet();
worksheet.set_name("Health Analytics")?;
worksheet.write_string_with_format(0, 0, "Metric", &header_format)?;
worksheet.write_string_with_format(0, 1, "Value", &header_format)?;
let mut row = 1;
worksheet.write_string(row, 0, "Current CPU (%)")?;
worksheet.write_number(row, 1, health_analytics.current_cpu_percent)?;
row += 1;
worksheet.write_string(row, 0, "Current Memory (MB)")?;
worksheet.write_number(row, 1, health_analytics.current_memory_mb)?;
row += 1;
worksheet.write_string(row, 0, "Active Connections")?;
worksheet.write_number(row, 1, health_analytics.active_connections as f64)?;
row += 1;
worksheet.write_string(row, 0, "Cache Hit Rate")?;
worksheet.write_number(row, 1, health_analytics.cache_hit_rate)?;
row += 1;
worksheet.write_string(row, 0, "Error Rate")?;
worksheet.write_number(row, 1, health_analytics.error_rate)?;
let worksheet = workbook.add_worksheet();
worksheet.set_name("Health Timeline")?;
worksheet.write_string_with_format(0, 0, "Timestamp", &header_format)?;
worksheet.write_string_with_format(0, 1, "CPU (%)", &header_format)?;
worksheet.write_string_with_format(0, 2, "Memory (MB)", &header_format)?;
worksheet.write_string_with_format(0, 3, "Active Connections", &header_format)?;
worksheet.write_string_with_format(0, 4, "Requests/Second", &header_format)?;
for (row, datapoint) in (1..).zip(health_analytics.health_timeline.iter()) {
worksheet.write_string(row, 0, datapoint.timestamp.to_rfc3339())?;
worksheet.write_number(row, 1, datapoint.cpu_percent)?;
worksheet.write_number(row, 2, datapoint.memory_mb)?;
worksheet.write_number(row, 3, datapoint.active_connections as f64)?;
worksheet.write_number(row, 4, datapoint.requests_per_second)?;
}
let worksheet = workbook.add_worksheet();
worksheet.set_name("Activity Timeline")?;
worksheet.write_string_with_format(0, 0, "Timestamp", &header_format)?;
worksheet.write_string_with_format(0, 1, "Active Users", &header_format)?;
worksheet.write_string_with_format(0, 2, "Queries/Min", &header_format)?;
worksheet.write_string_with_format(0, 3, "Avg Response Time (ms)", &header_format)?;
for (row, datapoint) in (1..).zip(user_analytics.activity_timeline.iter()) {
worksheet.write_string(row, 0, datapoint.timestamp.to_rfc3339())?;
worksheet.write_number(row, 1, datapoint.active_users as f64)?;
worksheet.write_number(row, 2, datapoint.queries_per_minute)?;
worksheet.write_number(row, 3, datapoint.avg_response_time_ms)?;
}
let buffer = workbook.save_to_buffer()?;
Ok(buffer)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test]
async fn test_dashboard_creation() {
let config = DashboardConfig::default();
let dashboard = DashboardAnalytics::new(config);
let overview = dashboard.get_overview().await;
assert_eq!(overview.total_queries, 0);
assert_eq!(overview.active_users_24h, 0);
}
#[tokio::test]
async fn test_record_query() {
let config = DashboardConfig::default();
let dashboard = DashboardAnalytics::new(config);
let record = QueryRecord {
query_id: "test-query-1".to_string(),
query_type: QueryType::NaturalLanguage,
execution_time_ms: 150,
result_count: 5,
success: true,
timestamp: Utc::now(),
error: None,
};
dashboard.record_query(record).await;
let overview = dashboard.get_overview().await;
assert_eq!(overview.total_queries, 1);
assert_eq!(overview.successful_queries, 1);
}
#[tokio::test]
async fn test_time_range() {
let now = Utc::now();
let range = TimeRange {
start: now - Duration::hours(24),
            end: now + Duration::hours(1),
        };
assert!(range.contains(now));
assert!(!range.contains(now - Duration::days(2)));
}
#[test]
fn test_percentile_calculation() {
let values = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0];
let p95 = DashboardAnalytics::calculate_percentile(&values, 0.95);
assert!(p95 >= 9.0);
}
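    // Added edge-case check: an empty sample should fall back to 0.0 rather than
    // panicking or indexing out of bounds.
    #[test]
    fn test_percentile_empty_values() {
        let p95 = DashboardAnalytics::calculate_percentile(&[], 0.95);
        assert_eq!(p95, 0.0);
    }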
#[tokio::test]
async fn test_csv_export_with_data() {
let config = DashboardConfig::default();
let dashboard = DashboardAnalytics::new(config);
dashboard
.record_query(QueryRecord {
query_id: "csv_test".to_string(),
query_type: QueryType::VectorSearch,
execution_time_ms: 75,
result_count: 20,
success: true,
timestamp: Utc::now(),
error: None,
})
.await;
let time_range = TimeRange::last_hours(24);
let csv_data = dashboard
.export_data(ExportFormat::Csv, time_range)
.await
.expect("should succeed");
let csv_str = String::from_utf8(csv_data).expect("should succeed");
assert!(csv_str.contains("=== QUERY ANALYTICS ==="));
assert!(csv_str.contains("Total Queries,1"));
}
#[tokio::test]
#[cfg(feature = "excel-export")]
async fn test_excel_export_with_data() {
let config = DashboardConfig::default();
let dashboard = DashboardAnalytics::new(config);
for i in 0..3 {
dashboard
.record_query(QueryRecord {
query_id: format!("excel_{}", i),
query_type: QueryType::Sparql,
execution_time_ms: 100,
result_count: 10,
success: true,
timestamp: Utc::now(),
error: None,
})
.await;
}
let time_range = TimeRange::last_days(1);
let excel_data = dashboard
.export_data(ExportFormat::Excel, time_range)
.await
.expect("should succeed");
assert!(!excel_data.is_empty());
        // .xlsx files are ZIP archives, so the buffer should start with the ZIP
        // "PK" magic bytes.
        assert_eq!(&excel_data[0..2], b"PK");
    }
#[tokio::test]
async fn test_rps_calculation() {
let config = DashboardConfig::default();
let dashboard = DashboardAnalytics::new(config);
for _ in 0..5 {
dashboard
.record_query(QueryRecord {
query_id: format!("rps_{}", fastrand::u32(..)),
query_type: QueryType::Hybrid,
execution_time_ms: 50,
result_count: 5,
success: true,
timestamp: Utc::now(),
error: None,
})
.await;
}
dashboard.update_health(45.0, 500.0, 8).await;
let health = dashboard
.get_health_analytics(TimeRange::last_hours(1))
.await;
        assert!(!health.health_timeline.is_empty());
        // Five recent queries should give a non-zero requests/second sample.
        assert!(health.health_timeline[0].requests_per_second > 0.0);
}
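    // Added check (uses only the public API above): repeated activity updates
    // should accumulate per user, and the JSON export should round-trip through
    // serde_json with the expected top-level sections.
    #[tokio::test]
    async fn test_user_activity_and_json_export() {
        let dashboard = DashboardAnalytics::new(DashboardConfig::default());
        dashboard.update_user_activity("alice".to_string(), 3).await;
        dashboard.update_user_activity("alice".to_string(), 2).await;
        dashboard.update_user_activity("bob".to_string(), 1).await;
        let analytics = dashboard
            .get_user_analytics(TimeRange::last_hours(1))
            .await;
        assert_eq!(analytics.top_users[0].user_id, "alice");
        assert_eq!(analytics.top_users[0].query_count, 5);
        let json_data = dashboard
            .export_data(ExportFormat::Json, TimeRange::last_hours(1))
            .await
            .expect("export should succeed");
        let value: serde_json::Value =
            serde_json::from_slice(&json_data).expect("export should be valid JSON");
        assert!(value.get("overview").is_some());
        assert!(value.get("query_analytics").is_some());
    }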
}