1use anyhow::Result;
7use chrono::{DateTime, Duration, Utc};
8#[cfg(feature = "excel-export")]
9use rust_xlsxwriter::{Format, Workbook};
10use serde::{Deserialize, Serialize};
11use std::collections::HashMap;
12use std::sync::Arc;
13use tokio::sync::RwLock;
14use tracing::info;
15
/// Central analytics hub for the dashboard.
///
/// Aggregates query, user-activity, and system-health metrics behind
/// async `RwLock`s so recording and reading can proceed concurrently.
pub struct DashboardAnalytics {
    /// Query counters, running latency mean, and raw per-query history.
    query_metrics: Arc<RwLock<QueryMetrics>>,
    /// Per-user counters and the activity timeline.
    user_activity: Arc<RwLock<UserActivityTracker>>,
    /// Latest resource readings and the health timeline.
    system_health: Arc<RwLock<SystemHealthMetrics>>,
}
25
/// Tunable settings for the analytics dashboard.
///
/// NOTE(review): in the visible code only `retention_days` is read (for a
/// startup log line); the other fields are not yet wired up — confirm.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DashboardConfig {
    /// How many days of analytics data to retain.
    pub retention_days: u32,
    /// Whether real-time updates are enabled.
    pub enable_realtime: bool,
    /// Interval between aggregation passes, in seconds.
    pub aggregation_interval_secs: u64,
    /// Maximum number of data points kept per timeline.
    pub max_data_points: usize,
}
38
39impl Default for DashboardConfig {
40 fn default() -> Self {
41 Self {
42 retention_days: 30,
43 enable_realtime: true,
44 aggregation_interval_secs: 300, max_data_points: 100,
46 }
47 }
48}
49
/// Aggregated query execution metrics plus the raw per-query history.
#[derive(Debug, Clone, Default)]
pub struct QueryMetrics {
    /// Total number of queries recorded.
    pub total_queries: u64,
    /// Queries that completed successfully.
    pub successful_queries: u64,
    /// Queries that ended in an error.
    pub failed_queries: u64,
    /// Running mean of execution time across all recorded queries (ms).
    pub avg_response_time_ms: f64,
    /// 95th-percentile response time (ms).
    /// NOTE(review): never written by `record_query`; percentiles are only
    /// computed on demand in `get_query_analytics` — confirm this field is
    /// still needed.
    pub p95_response_time_ms: f64,
    /// 99th-percentile response time (ms); same caveat as p95 above.
    pub p99_response_time_ms: f64,
    /// Raw query records; bounded at ~10k entries by `record_query`.
    pub query_history: Vec<QueryRecord>,
}
68
/// A single executed query, as recorded by `DashboardAnalytics::record_query`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueryRecord {
    /// Caller-supplied identifier for the query.
    pub query_id: String,
    /// Which execution path handled the query.
    pub query_type: QueryType,
    /// Execution time in milliseconds.
    pub execution_time_ms: u64,
    /// Number of results returned.
    pub result_count: usize,
    /// Whether the query completed without error.
    pub success: bool,
    /// When the query executed (UTC).
    pub timestamp: DateTime<Utc>,
    /// Error message when `success` is false.
    pub error: Option<String>,
}
80
/// The query execution path; serialized in snake_case
/// (e.g. `"natural_language"`, `"vector_search"`).
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum QueryType {
    NaturalLanguage,
    Sparql,
    VectorSearch,
    Hybrid,
}
90
/// Aggregate user-activity state for the dashboard.
#[derive(Debug, Clone, Default)]
pub struct UserActivityTracker {
    /// Users seen active within the last 24 hours.
    pub active_users_24h: u64,
    /// Total number of sessions observed.
    pub total_sessions: u64,
    /// Mean session duration in seconds.
    pub avg_session_duration_secs: f64,
    /// Time-bucketed activity samples.
    pub activity_timeline: Vec<ActivityDataPoint>,
    /// Most active users, sorted by query count and capped at 100
    /// entries by `update_user_activity`.
    pub top_users: Vec<UserActivity>,
}
105
/// One sample on the user-activity timeline.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ActivityDataPoint {
    /// Sample time (UTC).
    pub timestamp: DateTime<Utc>,
    /// Users active at this sample.
    pub active_users: u64,
    /// Query throughput at this sample.
    pub queries_per_minute: f64,
    /// Mean response time at this sample, in milliseconds.
    pub avg_response_time_ms: f64,
}
114
/// Per-user activity counters, used for the "top users" leaderboard.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserActivity {
    /// Identifier of the user.
    pub user_id: String,
    /// Total queries issued by this user.
    pub query_count: u64,
    /// Number of sessions attributed to this user.
    pub session_count: u64,
    /// Cumulative session time, in seconds.
    pub total_time_secs: u64,
    /// Last time this user was seen active (UTC).
    pub last_active: DateTime<Utc>,
}
124
/// Latest system resource readings plus a bounded history timeline.
#[derive(Debug, Clone, Default)]
pub struct SystemHealthMetrics {
    /// Most recent CPU utilization (percent).
    pub cpu_usage_percent: f64,
    /// Most recent memory usage (megabytes).
    pub memory_usage_mb: f64,
    /// Current number of active connections.
    pub active_connections: u64,
    /// Cache hit rate. NOTE(review): never computed in this module — set by
    /// callers; units (fraction vs percent) unverified from here.
    pub cache_hit_rate: f64,
    /// Error rate. NOTE(review): likewise never computed in this module.
    pub error_rate: f64,
    /// Health samples over time; bounded by `update_health`.
    pub health_timeline: Vec<HealthDataPoint>,
}
141
/// One sample on the system-health timeline.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HealthDataPoint {
    /// Sample time (UTC).
    pub timestamp: DateTime<Utc>,
    /// CPU utilization at this sample (percent).
    pub cpu_percent: f64,
    /// Memory usage at this sample (MB).
    pub memory_mb: f64,
    /// Open connections at this sample.
    pub active_connections: u64,
    /// Throughput derived from the last 60 s of query history.
    pub requests_per_second: f64,
}
151
152impl DashboardAnalytics {
153 pub fn new(config: DashboardConfig) -> Self {
155 info!(
156 "Initializing dashboard analytics with retention: {} days",
157 config.retention_days
158 );
159
160 Self {
161 query_metrics: Arc::new(RwLock::new(QueryMetrics::default())),
162 user_activity: Arc::new(RwLock::new(UserActivityTracker::default())),
163 system_health: Arc::new(RwLock::new(SystemHealthMetrics::default())),
164 }
165 }
166
167 pub async fn get_overview(&self) -> DashboardOverview {
169 let query_metrics = self.query_metrics.read().await;
170 let user_activity = self.user_activity.read().await;
171 let system_health = self.system_health.read().await;
172
173 DashboardOverview {
174 total_queries: query_metrics.total_queries,
175 successful_queries: query_metrics.successful_queries,
176 failed_queries: query_metrics.failed_queries,
177 avg_response_time_ms: query_metrics.avg_response_time_ms,
178 active_users_24h: user_activity.active_users_24h,
179 total_sessions: user_activity.total_sessions,
180 cpu_usage_percent: system_health.cpu_usage_percent,
181 memory_usage_mb: system_health.memory_usage_mb,
182 cache_hit_rate: system_health.cache_hit_rate,
183 error_rate: system_health.error_rate,
184 timestamp: Utc::now(),
185 }
186 }
187
188 pub async fn get_query_analytics(&self, time_range: TimeRange) -> QueryAnalytics {
190 let metrics = self.query_metrics.read().await;
191
192 let filtered_queries: Vec<_> = metrics
194 .query_history
195 .iter()
196 .filter(|q| time_range.contains(q.timestamp))
197 .cloned()
198 .collect();
199
200 let total = filtered_queries.len() as u64;
202 let successful = filtered_queries.iter().filter(|q| q.success).count() as u64;
203 let failed = total - successful;
204
205 let execution_times: Vec<f64> = filtered_queries
206 .iter()
207 .map(|q| q.execution_time_ms as f64)
208 .collect();
209
210 let avg_time = if !execution_times.is_empty() {
211 execution_times.iter().sum::<f64>() / execution_times.len() as f64
212 } else {
213 0.0
214 };
215
216 let mut type_distribution = HashMap::new();
218 for query in &filtered_queries {
219 *type_distribution.entry(query.query_type).or_insert(0) += 1;
220 }
221
222 QueryAnalytics {
223 total_queries: total,
224 successful_queries: successful,
225 failed_queries: failed,
226 avg_response_time_ms: avg_time,
227 p95_response_time_ms: Self::calculate_percentile(&execution_times, 0.95),
228 p99_response_time_ms: Self::calculate_percentile(&execution_times, 0.99),
229 query_type_distribution: type_distribution,
230 time_range,
231 }
232 }
233
234 pub async fn get_user_analytics(&self, time_range: TimeRange) -> UserAnalytics {
236 let activity = self.user_activity.read().await;
237
238 let filtered_timeline: Vec<_> = activity
240 .activity_timeline
241 .iter()
242 .filter(|a| time_range.contains(a.timestamp))
243 .cloned()
244 .collect();
245
246 UserAnalytics {
247 active_users: activity.active_users_24h,
248 total_sessions: activity.total_sessions,
249 avg_session_duration_secs: activity.avg_session_duration_secs,
250 activity_timeline: filtered_timeline,
251 top_users: activity.top_users.clone(),
252 time_range,
253 }
254 }
255
256 pub async fn get_health_analytics(&self, time_range: TimeRange) -> HealthAnalytics {
258 let health = self.system_health.read().await;
259
260 let filtered_timeline: Vec<_> = health
262 .health_timeline
263 .iter()
264 .filter(|h| time_range.contains(h.timestamp))
265 .cloned()
266 .collect();
267
268 HealthAnalytics {
269 current_cpu_percent: health.cpu_usage_percent,
270 current_memory_mb: health.memory_usage_mb,
271 active_connections: health.active_connections,
272 cache_hit_rate: health.cache_hit_rate,
273 error_rate: health.error_rate,
274 health_timeline: filtered_timeline,
275 time_range,
276 }
277 }
278
279 pub async fn record_query(&self, record: QueryRecord) {
281 let mut metrics = self.query_metrics.write().await;
282
283 metrics.total_queries += 1;
284 if record.success {
285 metrics.successful_queries += 1;
286 } else {
287 metrics.failed_queries += 1;
288 }
289
290 let total_time = metrics.avg_response_time_ms * (metrics.total_queries - 1) as f64
292 + record.execution_time_ms as f64;
293 metrics.avg_response_time_ms = total_time / metrics.total_queries as f64;
294
295 metrics.query_history.push(record);
296
297 if metrics.query_history.len() > 10_000 {
299 metrics.query_history.drain(0..1_000);
300 }
301 }
302
303 pub async fn update_user_activity(&self, user_id: String, query_count: u64) {
305 let mut activity = self.user_activity.write().await;
306
307 if let Some(user) = activity.top_users.iter_mut().find(|u| u.user_id == user_id) {
309 user.query_count += query_count;
310 user.last_active = Utc::now();
311 } else {
312 activity.top_users.push(UserActivity {
313 user_id,
314 query_count,
315 session_count: 1,
316 total_time_secs: 0,
317 last_active: Utc::now(),
318 });
319 }
320
321 activity
323 .top_users
324 .sort_by_key(|item| std::cmp::Reverse(item.query_count));
325 activity.top_users.truncate(100);
326 }
327
328 pub async fn update_health(&self, cpu_percent: f64, memory_mb: f64, connections: u64) {
330 let mut health = self.system_health.write().await;
331
332 health.cpu_usage_percent = cpu_percent;
333 health.memory_usage_mb = memory_mb;
334 health.active_connections = connections;
335
336 let requests_per_second = self.calculate_requests_per_second().await;
338
339 health.health_timeline.push(HealthDataPoint {
341 timestamp: Utc::now(),
342 cpu_percent,
343 memory_mb,
344 active_connections: connections,
345 requests_per_second,
346 });
347
348 if health.health_timeline.len() > 288 {
350 health.health_timeline.drain(0..100);
351 }
352 }
353
354 async fn calculate_requests_per_second(&self) -> f64 {
356 let metrics = self.query_metrics.read().await;
357
358 let now = Utc::now();
360 let one_minute_ago = now - Duration::seconds(60);
361
362 let recent_queries = metrics
363 .query_history
364 .iter()
365 .filter(|q| q.timestamp >= one_minute_ago)
366 .count();
367
368 recent_queries as f64 / 60.0
370 }
371
372 fn calculate_percentile(values: &[f64], percentile: f64) -> f64 {
374 if values.is_empty() {
375 return 0.0;
376 }
377
378 let mut sorted = values.to_vec();
379 sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
380
381 let index = (percentile * sorted.len() as f64) as usize;
382 sorted.get(index).copied().unwrap_or(0.0)
383 }
384}
385
/// Headline numbers for the dashboard landing view, assembled by
/// `DashboardAnalytics::get_overview`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DashboardOverview {
    pub total_queries: u64,
    pub successful_queries: u64,
    pub failed_queries: u64,
    pub avg_response_time_ms: f64,
    pub active_users_24h: u64,
    pub total_sessions: u64,
    pub cpu_usage_percent: f64,
    pub memory_usage_mb: f64,
    pub cache_hit_rate: f64,
    pub error_rate: f64,
    /// When this snapshot was taken (UTC).
    pub timestamp: DateTime<Utc>,
}
401
/// Query statistics computed over a specific time window.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueryAnalytics {
    pub total_queries: u64,
    pub successful_queries: u64,
    pub failed_queries: u64,
    pub avg_response_time_ms: f64,
    pub p95_response_time_ms: f64,
    pub p99_response_time_ms: f64,
    /// Count of queries per execution path within the window.
    pub query_type_distribution: HashMap<QueryType, u64>,
    /// The window these statistics cover.
    pub time_range: TimeRange,
}
414
/// User-activity report over a specific time window.
///
/// NOTE(review): `active_users`, `total_sessions`, and the session-duration
/// mean are global snapshots, not window-filtered — only the timeline is.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserAnalytics {
    pub active_users: u64,
    pub total_sessions: u64,
    pub avg_session_duration_secs: f64,
    /// Activity samples restricted to `time_range`.
    pub activity_timeline: Vec<ActivityDataPoint>,
    /// Leaderboard copied from the tracker (not window-filtered).
    pub top_users: Vec<UserActivity>,
    pub time_range: TimeRange,
}
425
/// System-health report: current readings plus the timeline restricted to
/// a specific time window.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HealthAnalytics {
    pub current_cpu_percent: f64,
    pub current_memory_mb: f64,
    pub active_connections: u64,
    pub cache_hit_rate: f64,
    pub error_rate: f64,
    /// Health samples restricted to `time_range`.
    pub health_timeline: Vec<HealthDataPoint>,
    pub time_range: TimeRange,
}
437
/// An inclusive UTC time window `[start, end]`.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct TimeRange {
    /// Inclusive lower bound.
    pub start: DateTime<Utc>,
    /// Inclusive upper bound.
    pub end: DateTime<Utc>,
}
444
445impl TimeRange {
446 pub fn last_hours(hours: i64) -> Self {
448 let end = Utc::now();
449 let start = end - Duration::hours(hours);
450 Self { start, end }
451 }
452
453 pub fn last_days(days: i64) -> Self {
455 let end = Utc::now();
456 let start = end - Duration::days(days);
457 Self { start, end }
458 }
459
460 pub fn contains(&self, timestamp: DateTime<Utc>) -> bool {
462 timestamp >= self.start && timestamp <= self.end
463 }
464}
465
/// Supported export encodings, serialized in snake_case.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ExportFormat {
    /// Pretty-printed JSON bundle of all analytics sections.
    Json,
    /// Sectioned, human-readable CSV report.
    Csv,
    /// XLSX workbook; requires the `excel-export` cargo feature.
    Excel,
}
474
impl DashboardAnalytics {
    /// Export all analytics for `time_range` in the requested `format`,
    /// returning the encoded bytes.
    ///
    /// # Errors
    /// Fails if serialization fails, or if `ExportFormat::Excel` is
    /// requested in a build without the `excel-export` feature.
    pub async fn export_data(
        &self,
        format: ExportFormat,
        time_range: TimeRange,
    ) -> Result<Vec<u8>> {
        match format {
            ExportFormat::Json => self.export_json(time_range).await,
            ExportFormat::Csv => self.export_csv(time_range).await,
            ExportFormat::Excel => {
                // Feature-gated at the statement level so the non-feature
                // build still compiles without `export_excel` existing.
                #[cfg(feature = "excel-export")]
                {
                    self.export_excel(time_range).await
                }
                #[cfg(not(feature = "excel-export"))]
                {
                    anyhow::bail!("Excel export requires the 'excel-export' feature to be enabled")
                }
            }
        }
    }

    /// Bundle overview plus the three analytics sections into
    /// pretty-printed JSON.
    async fn export_json(&self, time_range: TimeRange) -> Result<Vec<u8>> {
        let overview = self.get_overview().await;
        let query_analytics = self.get_query_analytics(time_range).await;
        let user_analytics = self.get_user_analytics(time_range).await;
        let health_analytics = self.get_health_analytics(time_range).await;

        let export_data = serde_json::json!({
            "overview": overview,
            "query_analytics": query_analytics,
            "user_analytics": user_analytics,
            "health_analytics": health_analytics,
        });

        Ok(serde_json::to_vec_pretty(&export_data)?)
    }

    /// Build a multi-section, human-readable CSV report.
    ///
    /// Each section is introduced by a `=== NAME ===` banner line, so the
    /// output is a report rather than a single machine-parsable table.
    ///
    /// NOTE(review): free-text fields (user IDs) are written unescaped; an
    /// ID containing a comma or newline would corrupt its row — confirm
    /// upstream guarantees on ID contents.
    async fn export_csv(&self, time_range: TimeRange) -> Result<Vec<u8>> {
        let query_analytics = self.get_query_analytics(time_range).await;
        let user_analytics = self.get_user_analytics(time_range).await;
        let health_analytics = self.get_health_analytics(time_range).await;

        let mut csv_output = String::new();

        // Section: query summary metrics.
        csv_output.push_str("=== QUERY ANALYTICS ===\n");
        csv_output.push_str("Metric,Value\n");
        csv_output.push_str(&format!(
            "Total Queries,{}\n",
            query_analytics.total_queries
        ));
        csv_output.push_str(&format!(
            "Successful Queries,{}\n",
            query_analytics.successful_queries
        ));
        csv_output.push_str(&format!(
            "Failed Queries,{}\n",
            query_analytics.failed_queries
        ));
        csv_output.push_str(&format!(
            "Average Response Time (ms),{:.2}\n",
            query_analytics.avg_response_time_ms
        ));
        csv_output.push_str(&format!(
            "P95 Response Time (ms),{:.2}\n",
            query_analytics.p95_response_time_ms
        ));
        csv_output.push_str(&format!(
            "P99 Response Time (ms),{:.2}\n",
            query_analytics.p99_response_time_ms
        ));
        csv_output.push('\n');

        // Section: per-type query counts (HashMap iteration order, so row
        // order is unspecified).
        csv_output.push_str("=== QUERY TYPE DISTRIBUTION ===\n");
        csv_output.push_str("Query Type,Count\n");
        for (query_type, count) in &query_analytics.query_type_distribution {
            csv_output.push_str(&format!("{:?},{}\n", query_type, count));
        }
        csv_output.push('\n');

        // Section: user summary metrics.
        csv_output.push_str("=== USER ANALYTICS ===\n");
        csv_output.push_str("Metric,Value\n");
        csv_output.push_str(&format!("Active Users,{}\n", user_analytics.active_users));
        csv_output.push_str(&format!(
            "Total Sessions,{}\n",
            user_analytics.total_sessions
        ));
        csv_output.push_str(&format!(
            "Avg Session Duration (secs),{:.2}\n",
            user_analytics.avg_session_duration_secs
        ));
        csv_output.push('\n');

        // Section: leaderboard rows.
        csv_output.push_str("=== TOP USERS ===\n");
        csv_output.push_str("User ID,Query Count,Session Count,Total Time (secs),Last Active\n");
        for user in &user_analytics.top_users {
            csv_output.push_str(&format!(
                "{},{},{},{},{}\n",
                user.user_id,
                user.query_count,
                user.session_count,
                user.total_time_secs,
                user.last_active.to_rfc3339()
            ));
        }
        csv_output.push('\n');

        // Section: current health readings.
        csv_output.push_str("=== HEALTH ANALYTICS ===\n");
        csv_output.push_str("Metric,Value\n");
        csv_output.push_str(&format!(
            "Current CPU (%),{:.2}\n",
            health_analytics.current_cpu_percent
        ));
        csv_output.push_str(&format!(
            "Current Memory (MB),{:.2}\n",
            health_analytics.current_memory_mb
        ));
        csv_output.push_str(&format!(
            "Active Connections,{}\n",
            health_analytics.active_connections
        ));
        csv_output.push_str(&format!(
            "Cache Hit Rate,{:.2}\n",
            health_analytics.cache_hit_rate
        ));
        csv_output.push_str(&format!("Error Rate,{:.2}\n", health_analytics.error_rate));
        csv_output.push('\n');

        // Section: health samples over the window.
        csv_output.push_str("=== HEALTH TIMELINE ===\n");
        csv_output.push_str("Timestamp,CPU (%),Memory (MB),Active Connections,Requests/Second\n");
        for datapoint in &health_analytics.health_timeline {
            csv_output.push_str(&format!(
                "{},{:.2},{:.2},{},{:.2}\n",
                datapoint.timestamp.to_rfc3339(),
                datapoint.cpu_percent,
                datapoint.memory_mb,
                datapoint.active_connections,
                datapoint.requests_per_second
            ));
        }
        csv_output.push('\n');

        // Section: user-activity samples over the window.
        csv_output.push_str("=== ACTIVITY TIMELINE ===\n");
        csv_output.push_str("Timestamp,Active Users,Queries/Min,Avg Response Time (ms)\n");
        for datapoint in &user_analytics.activity_timeline {
            csv_output.push_str(&format!(
                "{},{},{:.2},{:.2}\n",
                datapoint.timestamp.to_rfc3339(),
                datapoint.active_users,
                datapoint.queries_per_minute,
                datapoint.avg_response_time_ms
            ));
        }

        Ok(csv_output.into_bytes())
    }

    /// Build an XLSX workbook with one worksheet per analytics section.
    ///
    /// Only compiled with the `excel-export` feature (pulls in
    /// `rust_xlsxwriter`).
    #[cfg(feature = "excel-export")]
    async fn export_excel(&self, time_range: TimeRange) -> Result<Vec<u8>> {
        let query_analytics = self.get_query_analytics(time_range).await;
        let user_analytics = self.get_user_analytics(time_range).await;
        let health_analytics = self.get_health_analytics(time_range).await;

        let mut workbook = Workbook::new();

        // Bold format reused for every header row.
        let header_format = Format::new().set_bold();

        // Sheet 1: query summary (Metric/Value pairs).
        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Query Analytics")?;

        worksheet.write_string_with_format(0, 0, "Metric", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Value", &header_format)?;

        let mut row = 1;
        worksheet.write_string(row, 0, "Total Queries")?;
        worksheet.write_number(row, 1, query_analytics.total_queries as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Successful Queries")?;
        worksheet.write_number(row, 1, query_analytics.successful_queries as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Failed Queries")?;
        worksheet.write_number(row, 1, query_analytics.failed_queries as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Avg Response Time (ms)")?;
        worksheet.write_number(row, 1, query_analytics.avg_response_time_ms)?;
        row += 1;

        worksheet.write_string(row, 0, "P95 Response Time (ms)")?;
        worksheet.write_number(row, 1, query_analytics.p95_response_time_ms)?;
        row += 1;

        worksheet.write_string(row, 0, "P99 Response Time (ms)")?;
        worksheet.write_number(row, 1, query_analytics.p99_response_time_ms)?;

        // Sheet 2: per-type query counts.
        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Query Types")?;

        worksheet.write_string_with_format(0, 0, "Query Type", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Count", &header_format)?;

        for (row, (query_type, count)) in (1..).zip(query_analytics.query_type_distribution.iter())
        {
            worksheet.write_string(row, 0, format!("{:?}", query_type))?;
            worksheet.write_number(row, 1, *count as f64)?;
        }

        // Sheet 3: user summary.
        let worksheet = workbook.add_worksheet();
        worksheet.set_name("User Analytics")?;

        worksheet.write_string_with_format(0, 0, "Metric", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Value", &header_format)?;

        let mut row = 1;
        worksheet.write_string(row, 0, "Active Users")?;
        worksheet.write_number(row, 1, user_analytics.active_users as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Total Sessions")?;
        worksheet.write_number(row, 1, user_analytics.total_sessions as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Avg Session Duration (secs)")?;
        worksheet.write_number(row, 1, user_analytics.avg_session_duration_secs)?;

        // Sheet 4: leaderboard.
        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Top Users")?;

        worksheet.write_string_with_format(0, 0, "User ID", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Query Count", &header_format)?;
        worksheet.write_string_with_format(0, 2, "Session Count", &header_format)?;
        worksheet.write_string_with_format(0, 3, "Total Time (secs)", &header_format)?;
        worksheet.write_string_with_format(0, 4, "Last Active", &header_format)?;

        for (row, user) in (1..).zip(user_analytics.top_users.iter()) {
            worksheet.write_string(row, 0, &user.user_id)?;
            worksheet.write_number(row, 1, user.query_count as f64)?;
            worksheet.write_number(row, 2, user.session_count as f64)?;
            worksheet.write_number(row, 3, user.total_time_secs as f64)?;
            worksheet.write_string(row, 4, user.last_active.to_rfc3339())?;
        }

        // Sheet 5: current health readings.
        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Health Analytics")?;

        worksheet.write_string_with_format(0, 0, "Metric", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Value", &header_format)?;

        let mut row = 1;
        worksheet.write_string(row, 0, "Current CPU (%)")?;
        worksheet.write_number(row, 1, health_analytics.current_cpu_percent)?;
        row += 1;

        worksheet.write_string(row, 0, "Current Memory (MB)")?;
        worksheet.write_number(row, 1, health_analytics.current_memory_mb)?;
        row += 1;

        worksheet.write_string(row, 0, "Active Connections")?;
        worksheet.write_number(row, 1, health_analytics.active_connections as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Cache Hit Rate")?;
        worksheet.write_number(row, 1, health_analytics.cache_hit_rate)?;
        row += 1;

        worksheet.write_string(row, 0, "Error Rate")?;
        worksheet.write_number(row, 1, health_analytics.error_rate)?;

        // Sheet 6: health samples over the window.
        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Health Timeline")?;

        worksheet.write_string_with_format(0, 0, "Timestamp", &header_format)?;
        worksheet.write_string_with_format(0, 1, "CPU (%)", &header_format)?;
        worksheet.write_string_with_format(0, 2, "Memory (MB)", &header_format)?;
        worksheet.write_string_with_format(0, 3, "Active Connections", &header_format)?;
        worksheet.write_string_with_format(0, 4, "Requests/Second", &header_format)?;

        for (row, datapoint) in (1..).zip(health_analytics.health_timeline.iter()) {
            worksheet.write_string(row, 0, datapoint.timestamp.to_rfc3339())?;
            worksheet.write_number(row, 1, datapoint.cpu_percent)?;
            worksheet.write_number(row, 2, datapoint.memory_mb)?;
            worksheet.write_number(row, 3, datapoint.active_connections as f64)?;
            worksheet.write_number(row, 4, datapoint.requests_per_second)?;
        }

        // Sheet 7: user-activity samples over the window.
        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Activity Timeline")?;

        worksheet.write_string_with_format(0, 0, "Timestamp", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Active Users", &header_format)?;
        worksheet.write_string_with_format(0, 2, "Queries/Min", &header_format)?;
        worksheet.write_string_with_format(0, 3, "Avg Response Time (ms)", &header_format)?;

        for (row, datapoint) in (1..).zip(user_analytics.activity_timeline.iter()) {
            worksheet.write_string(row, 0, datapoint.timestamp.to_rfc3339())?;
            worksheet.write_number(row, 1, datapoint.active_users as f64)?;
            worksheet.write_number(row, 2, datapoint.queries_per_minute)?;
            worksheet.write_number(row, 3, datapoint.avg_response_time_ms)?;
        }

        // Serialize the finished workbook to an in-memory XLSX (ZIP) buffer.
        let buffer = workbook.save_to_buffer()?;
        Ok(buffer)
    }
}
799
#[cfg(test)]
mod tests {
    use super::*;

    /// A fresh dashboard starts with every counter at zero.
    #[tokio::test]
    async fn test_dashboard_creation() {
        let config = DashboardConfig::default();
        let dashboard = DashboardAnalytics::new(config);
        let overview = dashboard.get_overview().await;

        assert_eq!(overview.total_queries, 0);
        assert_eq!(overview.active_users_24h, 0);
    }

    /// Recording a successful query bumps both total and success counters.
    #[tokio::test]
    async fn test_record_query() {
        let config = DashboardConfig::default();
        let dashboard = DashboardAnalytics::new(config);

        let record = QueryRecord {
            query_id: "test-query-1".to_string(),
            query_type: QueryType::NaturalLanguage,
            execution_time_ms: 150,
            result_count: 5,
            success: true,
            timestamp: Utc::now(),
            error: None,
        };

        dashboard.record_query(record).await;

        let overview = dashboard.get_overview().await;
        assert_eq!(overview.total_queries, 1);
        assert_eq!(overview.successful_queries, 1);
    }

    /// `contains` accepts in-window timestamps and rejects out-of-window ones.
    #[tokio::test]
    async fn test_time_range() {
        let now = Utc::now();
        let range = TimeRange {
            start: now - Duration::hours(24),
            end: now + Duration::hours(1),
        };

        assert!(range.contains(now));
        assert!(!range.contains(now - Duration::days(2)));
    }

    /// Nearest-rank p95 over 1..=10 must land at or above 9.
    #[test]
    fn test_percentile_calculation() {
        let values = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0];
        let p95 = DashboardAnalytics::calculate_percentile(&values, 0.95);
        assert!(p95 >= 9.0);
    }

    /// CSV export includes the section banner and the recorded counts.
    #[tokio::test]
    async fn test_csv_export_with_data() {
        let config = DashboardConfig::default();
        let dashboard = DashboardAnalytics::new(config);

        dashboard
            .record_query(QueryRecord {
                query_id: "csv_test".to_string(),
                query_type: QueryType::VectorSearch,
                execution_time_ms: 75,
                result_count: 20,
                success: true,
                timestamp: Utc::now(),
                error: None,
            })
            .await;

        let time_range = TimeRange::last_hours(24);
        let csv_data = dashboard
            .export_data(ExportFormat::Csv, time_range)
            .await
            .expect("should succeed");

        let csv_str = String::from_utf8(csv_data).expect("should succeed");
        assert!(csv_str.contains("=== QUERY ANALYTICS ==="));
        assert!(csv_str.contains("Total Queries,1"));
    }

    /// Excel export yields a non-empty buffer with the XLSX (ZIP) magic "PK".
    #[tokio::test]
    #[cfg(feature = "excel-export")]
    async fn test_excel_export_with_data() {
        let config = DashboardConfig::default();
        let dashboard = DashboardAnalytics::new(config);

        for i in 0..3 {
            dashboard
                .record_query(QueryRecord {
                    query_id: format!("excel_{}", i),
                    query_type: QueryType::Sparql,
                    execution_time_ms: 100,
                    result_count: 10,
                    success: true,
                    timestamp: Utc::now(),
                    error: None,
                })
                .await;
        }

        let time_range = TimeRange::last_days(1);
        let excel_data = dashboard
            .export_data(ExportFormat::Excel, time_range)
            .await
            .expect("should succeed");

        assert!(!excel_data.is_empty());
        assert_eq!(&excel_data[0..2], b"PK");
    }

    /// Health updates derive a requests/second sample from recent history.
    #[tokio::test]
    async fn test_rps_calculation() {
        let config = DashboardConfig::default();
        let dashboard = DashboardAnalytics::new(config);

        // Loop index gives unique, deterministic ids — no need for the
        // external `fastrand` crate the test previously pulled in.
        for i in 0..5 {
            dashboard
                .record_query(QueryRecord {
                    query_id: format!("rps_{}", i),
                    query_type: QueryType::Hybrid,
                    execution_time_ms: 50,
                    result_count: 5,
                    success: true,
                    timestamp: Utc::now(),
                    error: None,
                })
                .await;
        }

        dashboard.update_health(45.0, 500.0, 8).await;

        let health = dashboard
            .get_health_analytics(TimeRange::last_hours(1))
            .await;
        assert!(!health.health_timeline.is_empty());
    }
}