use anyhow::Result;
use chrono::{DateTime, Duration, Utc};
use rust_xlsxwriter::{Format, Workbook};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::info;

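/// Aggregates dashboard analytics: query metrics, user activity, and system
/// health, each behind an async `RwLock` so handlers can read and update them
/// concurrently.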
pub struct DashboardAnalytics {
    query_metrics: Arc<RwLock<QueryMetrics>>,
    user_activity: Arc<RwLock<UserActivityTracker>>,
    system_health: Arc<RwLock<SystemHealthMetrics>>,
}

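/// Configuration options for dashboard analytics collection and retention.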
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DashboardConfig {
    pub retention_days: u32,
    pub enable_realtime: bool,
    pub aggregation_interval_secs: u64,
    pub max_data_points: usize,
}

impl Default for DashboardConfig {
    fn default() -> Self {
        Self {
            retention_days: 30,
            enable_realtime: true,
            aggregation_interval_secs: 300,
            max_data_points: 100,
        }
    }
}

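/// Aggregate query statistics plus a bounded history of individual query records.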
#[derive(Debug, Clone, Default)]
pub struct QueryMetrics {
    pub total_queries: u64,
    pub successful_queries: u64,
    pub failed_queries: u64,
    pub avg_response_time_ms: f64,
    pub p95_response_time_ms: f64,
    pub p99_response_time_ms: f64,
    pub query_history: Vec<QueryRecord>,
}

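/// A single recorded query execution.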
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueryRecord {
    pub query_id: String,
    pub query_type: QueryType,
    pub execution_time_ms: u64,
    pub result_count: usize,
    pub success: bool,
    pub timestamp: DateTime<Utc>,
    pub error: Option<String>,
}

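/// The kind of query that was executed; serialized in `snake_case`.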
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum QueryType {
    NaturalLanguage,
    Sparql,
    VectorSearch,
    Hybrid,
}

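/// User session counters, an activity timeline, and the most active users.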
#[derive(Debug, Clone, Default)]
pub struct UserActivityTracker {
    pub active_users_24h: u64,
    pub total_sessions: u64,
    pub avg_session_duration_secs: f64,
    pub activity_timeline: Vec<ActivityDataPoint>,
    pub top_users: Vec<UserActivity>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ActivityDataPoint {
    pub timestamp: DateTime<Utc>,
    pub active_users: u64,
    pub queries_per_minute: f64,
    pub avg_response_time_ms: f64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserActivity {
    pub user_id: String,
    pub query_count: u64,
    pub session_count: u64,
    pub total_time_secs: u64,
    pub last_active: DateTime<Utc>,
}

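/// Current system resource usage plus a bounded timeline of health samples.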
#[derive(Debug, Clone, Default)]
pub struct SystemHealthMetrics {
    pub cpu_usage_percent: f64,
    pub memory_usage_mb: f64,
    pub active_connections: u64,
    pub cache_hit_rate: f64,
    pub error_rate: f64,
    pub health_timeline: Vec<HealthDataPoint>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HealthDataPoint {
    pub timestamp: DateTime<Utc>,
    pub cpu_percent: f64,
    pub memory_mb: f64,
    pub active_connections: u64,
    pub requests_per_second: f64,
}

impl DashboardAnalytics {
    pub fn new(config: DashboardConfig) -> Self {
        info!(
            "Initializing dashboard analytics with retention: {} days",
            config.retention_days
        );

        Self {
            query_metrics: Arc::new(RwLock::new(QueryMetrics::default())),
            user_activity: Arc::new(RwLock::new(UserActivityTracker::default())),
            system_health: Arc::new(RwLock::new(SystemHealthMetrics::default())),
        }
    }

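    /// Returns a point-in-time snapshot combining the current query, user, and health metrics.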
    pub async fn get_overview(&self) -> DashboardOverview {
        let query_metrics = self.query_metrics.read().await;
        let user_activity = self.user_activity.read().await;
        let system_health = self.system_health.read().await;

        DashboardOverview {
            total_queries: query_metrics.total_queries,
            successful_queries: query_metrics.successful_queries,
            failed_queries: query_metrics.failed_queries,
            avg_response_time_ms: query_metrics.avg_response_time_ms,
            active_users_24h: user_activity.active_users_24h,
            total_sessions: user_activity.total_sessions,
            cpu_usage_percent: system_health.cpu_usage_percent,
            memory_usage_mb: system_health.memory_usage_mb,
            cache_hit_rate: system_health.cache_hit_rate,
            error_rate: system_health.error_rate,
            timestamp: Utc::now(),
        }
    }

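    /// Computes query counts, mean and p95/p99 latency, and the per-type
    /// distribution for queries whose timestamps fall inside `time_range`.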
    pub async fn get_query_analytics(&self, time_range: TimeRange) -> QueryAnalytics {
        let metrics = self.query_metrics.read().await;

        let filtered_queries: Vec<_> = metrics
            .query_history
            .iter()
            .filter(|q| time_range.contains(q.timestamp))
            .cloned()
            .collect();

        let total = filtered_queries.len() as u64;
        let successful = filtered_queries.iter().filter(|q| q.success).count() as u64;
        let failed = total - successful;

        let execution_times: Vec<f64> = filtered_queries
            .iter()
            .map(|q| q.execution_time_ms as f64)
            .collect();

        let avg_time = if !execution_times.is_empty() {
            execution_times.iter().sum::<f64>() / execution_times.len() as f64
        } else {
            0.0
        };

        let mut type_distribution = HashMap::new();
        for query in &filtered_queries {
            *type_distribution.entry(query.query_type).or_insert(0) += 1;
        }

        QueryAnalytics {
            total_queries: total,
            successful_queries: successful,
            failed_queries: failed,
            avg_response_time_ms: avg_time,
            p95_response_time_ms: Self::calculate_percentile(&execution_times, 0.95),
            p99_response_time_ms: Self::calculate_percentile(&execution_times, 0.99),
            query_type_distribution: type_distribution,
            time_range,
        }
    }

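    /// Returns user-activity metrics with the activity timeline filtered to `time_range`.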
    pub async fn get_user_analytics(&self, time_range: TimeRange) -> UserAnalytics {
        let activity = self.user_activity.read().await;

        let filtered_timeline: Vec<_> = activity
            .activity_timeline
            .iter()
            .filter(|a| time_range.contains(a.timestamp))
            .cloned()
            .collect();

        UserAnalytics {
            active_users: activity.active_users_24h,
            total_sessions: activity.total_sessions,
            avg_session_duration_secs: activity.avg_session_duration_secs,
            activity_timeline: filtered_timeline,
            top_users: activity.top_users.clone(),
            time_range,
        }
    }

    pub async fn get_health_analytics(&self, time_range: TimeRange) -> HealthAnalytics {
        let health = self.system_health.read().await;

        let filtered_timeline: Vec<_> = health
            .health_timeline
            .iter()
            .filter(|h| time_range.contains(h.timestamp))
            .cloned()
            .collect();

        HealthAnalytics {
            current_cpu_percent: health.cpu_usage_percent,
            current_memory_mb: health.memory_usage_mb,
            active_connections: health.active_connections,
            cache_hit_rate: health.cache_hit_rate,
            error_rate: health.error_rate,
            health_timeline: filtered_timeline,
            time_range,
        }
    }

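    /// Records a completed query, updating counters, the running mean response
    /// time, and the bounded query history.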
    pub async fn record_query(&self, record: QueryRecord) {
        let mut metrics = self.query_metrics.write().await;

        metrics.total_queries += 1;
        if record.success {
            metrics.successful_queries += 1;
        } else {
            metrics.failed_queries += 1;
        }

        // Incremental mean: new_avg = (old_avg * (n - 1) + x) / n.
        let total_time = metrics.avg_response_time_ms * (metrics.total_queries - 1) as f64
            + record.execution_time_ms as f64;
        metrics.avg_response_time_ms = total_time / metrics.total_queries as f64;

        metrics.query_history.push(record);

        // Cap the in-memory history: drop the oldest 1,000 records once it exceeds 10,000.
        if metrics.query_history.len() > 10_000 {
            metrics.query_history.drain(0..1_000);
        }
    }

    pub async fn update_user_activity(&self, user_id: String, query_count: u64) {
        let mut activity = self.user_activity.write().await;

        if let Some(user) = activity.top_users.iter_mut().find(|u| u.user_id == user_id) {
            user.query_count += query_count;
            user.last_active = Utc::now();
        } else {
            activity.top_users.push(UserActivity {
                user_id,
                query_count,
                session_count: 1,
                total_time_secs: 0,
                last_active: Utc::now(),
            });
        }

        activity
            .top_users
            .sort_by(|a, b| b.query_count.cmp(&a.query_count));
        activity.top_users.truncate(100);
    }

    pub async fn update_health(&self, cpu_percent: f64, memory_mb: f64, connections: u64) {
        let mut health = self.system_health.write().await;

        health.cpu_usage_percent = cpu_percent;
        health.memory_usage_mb = memory_mb;
        health.active_connections = connections;

        let requests_per_second = self.calculate_requests_per_second().await;

        health.health_timeline.push(HealthDataPoint {
            timestamp: Utc::now(),
            cpu_percent,
            memory_mb,
            active_connections: connections,
            requests_per_second,
        });

        // Keep roughly one day of samples at the default 5-minute interval (288 points).
        if health.health_timeline.len() > 288 {
            health.health_timeline.drain(0..100);
        }
    }

    async fn calculate_requests_per_second(&self) -> f64 {
        let metrics = self.query_metrics.read().await;

        let now = Utc::now();
        let one_minute_ago = now - Duration::seconds(60);

        let recent_queries = metrics
            .query_history
            .iter()
            .filter(|q| q.timestamp >= one_minute_ago)
            .count();

        recent_queries as f64 / 60.0
    }

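    /// Nearest-rank percentile of `values`; returns 0.0 for an empty slice.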
    fn calculate_percentile(values: &[f64], percentile: f64) -> f64 {
        if values.is_empty() {
            return 0.0;
        }

        let mut sorted = values.to_vec();
        sorted.sort_by(|a, b| a.total_cmp(b));

        // Nearest-rank index, clamped so a percentile of 1.0 still maps to the last element.
        let index = ((percentile * sorted.len() as f64) as usize).min(sorted.len() - 1);
        sorted[index]
    }
}

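/// Point-in-time summary returned by [`DashboardAnalytics::get_overview`].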
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DashboardOverview {
    pub total_queries: u64,
    pub successful_queries: u64,
    pub failed_queries: u64,
    pub avg_response_time_ms: f64,
    pub active_users_24h: u64,
    pub total_sessions: u64,
    pub cpu_usage_percent: f64,
    pub memory_usage_mb: f64,
    pub cache_hit_rate: f64,
    pub error_rate: f64,
    pub timestamp: DateTime<Utc>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QueryAnalytics {
    pub total_queries: u64,
    pub successful_queries: u64,
    pub failed_queries: u64,
    pub avg_response_time_ms: f64,
    pub p95_response_time_ms: f64,
    pub p99_response_time_ms: f64,
    pub query_type_distribution: HashMap<QueryType, u64>,
    pub time_range: TimeRange,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserAnalytics {
    pub active_users: u64,
    pub total_sessions: u64,
    pub avg_session_duration_secs: f64,
    pub activity_timeline: Vec<ActivityDataPoint>,
    pub top_users: Vec<UserActivity>,
    pub time_range: TimeRange,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HealthAnalytics {
    pub current_cpu_percent: f64,
    pub current_memory_mb: f64,
    pub active_connections: u64,
    pub cache_hit_rate: f64,
    pub error_rate: f64,
    pub health_timeline: Vec<HealthDataPoint>,
    pub time_range: TimeRange,
}

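/// An inclusive time window used to filter analytics by timestamp.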
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct TimeRange {
    pub start: DateTime<Utc>,
    pub end: DateTime<Utc>,
}

impl TimeRange {
    pub fn last_hours(hours: i64) -> Self {
        let end = Utc::now();
        let start = end - Duration::hours(hours);
        Self { start, end }
    }

    pub fn last_days(days: i64) -> Self {
        let end = Utc::now();
        let start = end - Duration::days(days);
        Self { start, end }
    }

    pub fn contains(&self, timestamp: DateTime<Utc>) -> bool {
        timestamp >= self.start && timestamp <= self.end
    }
}

#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ExportFormat {
    Json,
    Csv,
    Excel,
}

impl DashboardAnalytics {
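    /// Serializes the analytics for `time_range` into the requested format and
    /// returns the encoded bytes (pretty-printed JSON, sectioned CSV, or an XLSX workbook).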
    pub async fn export_data(
        &self,
        format: ExportFormat,
        time_range: TimeRange,
    ) -> Result<Vec<u8>> {
        match format {
            ExportFormat::Json => self.export_json(time_range).await,
            ExportFormat::Csv => self.export_csv(time_range).await,
            ExportFormat::Excel => self.export_excel(time_range).await,
        }
    }

    async fn export_json(&self, time_range: TimeRange) -> Result<Vec<u8>> {
        let overview = self.get_overview().await;
        let query_analytics = self.get_query_analytics(time_range).await;
        let user_analytics = self.get_user_analytics(time_range).await;
        let health_analytics = self.get_health_analytics(time_range).await;

        let export_data = serde_json::json!({
            "overview": overview,
            "query_analytics": query_analytics,
            "user_analytics": user_analytics,
            "health_analytics": health_analytics,
        });

        Ok(serde_json::to_vec_pretty(&export_data)?)
    }

    async fn export_csv(&self, time_range: TimeRange) -> Result<Vec<u8>> {
        let query_analytics = self.get_query_analytics(time_range).await;
        let user_analytics = self.get_user_analytics(time_range).await;
        let health_analytics = self.get_health_analytics(time_range).await;

        let mut csv_output = String::new();

        csv_output.push_str("=== QUERY ANALYTICS ===\n");
        csv_output.push_str("Metric,Value\n");
        csv_output.push_str(&format!(
            "Total Queries,{}\n",
            query_analytics.total_queries
        ));
        csv_output.push_str(&format!(
            "Successful Queries,{}\n",
            query_analytics.successful_queries
        ));
        csv_output.push_str(&format!(
            "Failed Queries,{}\n",
            query_analytics.failed_queries
        ));
        csv_output.push_str(&format!(
            "Average Response Time (ms),{:.2}\n",
            query_analytics.avg_response_time_ms
        ));
        csv_output.push_str(&format!(
            "P95 Response Time (ms),{:.2}\n",
            query_analytics.p95_response_time_ms
        ));
        csv_output.push_str(&format!(
            "P99 Response Time (ms),{:.2}\n",
            query_analytics.p99_response_time_ms
        ));
        csv_output.push('\n');

        csv_output.push_str("=== QUERY TYPE DISTRIBUTION ===\n");
        csv_output.push_str("Query Type,Count\n");
        for (query_type, count) in &query_analytics.query_type_distribution {
            csv_output.push_str(&format!("{:?},{}\n", query_type, count));
        }
        csv_output.push('\n');

        csv_output.push_str("=== USER ANALYTICS ===\n");
        csv_output.push_str("Metric,Value\n");
        csv_output.push_str(&format!("Active Users,{}\n", user_analytics.active_users));
        csv_output.push_str(&format!(
            "Total Sessions,{}\n",
            user_analytics.total_sessions
        ));
        csv_output.push_str(&format!(
            "Avg Session Duration (secs),{:.2}\n",
            user_analytics.avg_session_duration_secs
        ));
        csv_output.push('\n');

        csv_output.push_str("=== TOP USERS ===\n");
        csv_output.push_str("User ID,Query Count,Session Count,Total Time (secs),Last Active\n");
        for user in &user_analytics.top_users {
            csv_output.push_str(&format!(
                "{},{},{},{},{}\n",
                user.user_id,
                user.query_count,
                user.session_count,
                user.total_time_secs,
                user.last_active.to_rfc3339()
            ));
        }
        csv_output.push('\n');

        csv_output.push_str("=== HEALTH ANALYTICS ===\n");
        csv_output.push_str("Metric,Value\n");
        csv_output.push_str(&format!(
            "Current CPU (%),{:.2}\n",
            health_analytics.current_cpu_percent
        ));
        csv_output.push_str(&format!(
            "Current Memory (MB),{:.2}\n",
            health_analytics.current_memory_mb
        ));
        csv_output.push_str(&format!(
            "Active Connections,{}\n",
            health_analytics.active_connections
        ));
        csv_output.push_str(&format!(
            "Cache Hit Rate,{:.2}\n",
            health_analytics.cache_hit_rate
        ));
        csv_output.push_str(&format!("Error Rate,{:.2}\n", health_analytics.error_rate));
        csv_output.push('\n');

        csv_output.push_str("=== HEALTH TIMELINE ===\n");
        csv_output.push_str("Timestamp,CPU (%),Memory (MB),Active Connections,Requests/Second\n");
        for datapoint in &health_analytics.health_timeline {
            csv_output.push_str(&format!(
                "{},{:.2},{:.2},{},{:.2}\n",
                datapoint.timestamp.to_rfc3339(),
                datapoint.cpu_percent,
                datapoint.memory_mb,
                datapoint.active_connections,
                datapoint.requests_per_second
            ));
        }
        csv_output.push('\n');

        csv_output.push_str("=== ACTIVITY TIMELINE ===\n");
        csv_output.push_str("Timestamp,Active Users,Queries/Min,Avg Response Time (ms)\n");
        for datapoint in &user_analytics.activity_timeline {
            csv_output.push_str(&format!(
                "{},{},{:.2},{:.2}\n",
                datapoint.timestamp.to_rfc3339(),
                datapoint.active_users,
                datapoint.queries_per_minute,
                datapoint.avg_response_time_ms
            ));
        }

        Ok(csv_output.into_bytes())
    }

    async fn export_excel(&self, time_range: TimeRange) -> Result<Vec<u8>> {
        let query_analytics = self.get_query_analytics(time_range).await;
        let user_analytics = self.get_user_analytics(time_range).await;
        let health_analytics = self.get_health_analytics(time_range).await;

        let mut workbook = Workbook::new();

        let header_format = Format::new().set_bold();

        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Query Analytics")?;

        worksheet.write_string_with_format(0, 0, "Metric", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Value", &header_format)?;

        let mut row = 1;
        worksheet.write_string(row, 0, "Total Queries")?;
        worksheet.write_number(row, 1, query_analytics.total_queries as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Successful Queries")?;
        worksheet.write_number(row, 1, query_analytics.successful_queries as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Failed Queries")?;
        worksheet.write_number(row, 1, query_analytics.failed_queries as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Avg Response Time (ms)")?;
        worksheet.write_number(row, 1, query_analytics.avg_response_time_ms)?;
        row += 1;

        worksheet.write_string(row, 0, "P95 Response Time (ms)")?;
        worksheet.write_number(row, 1, query_analytics.p95_response_time_ms)?;
        row += 1;

        worksheet.write_string(row, 0, "P99 Response Time (ms)")?;
        worksheet.write_number(row, 1, query_analytics.p99_response_time_ms)?;

        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Query Types")?;

        worksheet.write_string_with_format(0, 0, "Query Type", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Count", &header_format)?;

        let mut row = 1;
        for (query_type, count) in &query_analytics.query_type_distribution {
            worksheet.write_string(row, 0, format!("{:?}", query_type))?;
            worksheet.write_number(row, 1, *count as f64)?;
            row += 1;
        }

        let worksheet = workbook.add_worksheet();
        worksheet.set_name("User Analytics")?;

        worksheet.write_string_with_format(0, 0, "Metric", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Value", &header_format)?;

        let mut row = 1;
        worksheet.write_string(row, 0, "Active Users")?;
        worksheet.write_number(row, 1, user_analytics.active_users as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Total Sessions")?;
        worksheet.write_number(row, 1, user_analytics.total_sessions as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Avg Session Duration (secs)")?;
        worksheet.write_number(row, 1, user_analytics.avg_session_duration_secs)?;

        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Top Users")?;

        worksheet.write_string_with_format(0, 0, "User ID", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Query Count", &header_format)?;
        worksheet.write_string_with_format(0, 2, "Session Count", &header_format)?;
        worksheet.write_string_with_format(0, 3, "Total Time (secs)", &header_format)?;
        worksheet.write_string_with_format(0, 4, "Last Active", &header_format)?;

        let mut row = 1;
        for user in &user_analytics.top_users {
            worksheet.write_string(row, 0, &user.user_id)?;
            worksheet.write_number(row, 1, user.query_count as f64)?;
            worksheet.write_number(row, 2, user.session_count as f64)?;
            worksheet.write_number(row, 3, user.total_time_secs as f64)?;
            worksheet.write_string(row, 4, user.last_active.to_rfc3339())?;
            row += 1;
        }

        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Health Analytics")?;

        worksheet.write_string_with_format(0, 0, "Metric", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Value", &header_format)?;

        let mut row = 1;
        worksheet.write_string(row, 0, "Current CPU (%)")?;
        worksheet.write_number(row, 1, health_analytics.current_cpu_percent)?;
        row += 1;

        worksheet.write_string(row, 0, "Current Memory (MB)")?;
        worksheet.write_number(row, 1, health_analytics.current_memory_mb)?;
        row += 1;

        worksheet.write_string(row, 0, "Active Connections")?;
        worksheet.write_number(row, 1, health_analytics.active_connections as f64)?;
        row += 1;

        worksheet.write_string(row, 0, "Cache Hit Rate")?;
        worksheet.write_number(row, 1, health_analytics.cache_hit_rate)?;
        row += 1;

        worksheet.write_string(row, 0, "Error Rate")?;
        worksheet.write_number(row, 1, health_analytics.error_rate)?;

        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Health Timeline")?;

        worksheet.write_string_with_format(0, 0, "Timestamp", &header_format)?;
        worksheet.write_string_with_format(0, 1, "CPU (%)", &header_format)?;
        worksheet.write_string_with_format(0, 2, "Memory (MB)", &header_format)?;
        worksheet.write_string_with_format(0, 3, "Active Connections", &header_format)?;
        worksheet.write_string_with_format(0, 4, "Requests/Second", &header_format)?;

        let mut row = 1;
        for datapoint in &health_analytics.health_timeline {
            worksheet.write_string(row, 0, datapoint.timestamp.to_rfc3339())?;
            worksheet.write_number(row, 1, datapoint.cpu_percent)?;
            worksheet.write_number(row, 2, datapoint.memory_mb)?;
            worksheet.write_number(row, 3, datapoint.active_connections as f64)?;
            worksheet.write_number(row, 4, datapoint.requests_per_second)?;
            row += 1;
        }

        let worksheet = workbook.add_worksheet();
        worksheet.set_name("Activity Timeline")?;

        worksheet.write_string_with_format(0, 0, "Timestamp", &header_format)?;
        worksheet.write_string_with_format(0, 1, "Active Users", &header_format)?;
        worksheet.write_string_with_format(0, 2, "Queries/Min", &header_format)?;
        worksheet.write_string_with_format(0, 3, "Avg Response Time (ms)", &header_format)?;

        let mut row = 1;
        for datapoint in &user_analytics.activity_timeline {
            worksheet.write_string(row, 0, datapoint.timestamp.to_rfc3339())?;
            worksheet.write_number(row, 1, datapoint.active_users as f64)?;
            worksheet.write_number(row, 2, datapoint.queries_per_minute)?;
            worksheet.write_number(row, 3, datapoint.avg_response_time_ms)?;
            row += 1;
        }

        let buffer = workbook.save_to_buffer()?;
        Ok(buffer)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test_dashboard_creation() {
        let config = DashboardConfig::default();
        let dashboard = DashboardAnalytics::new(config);
        let overview = dashboard.get_overview().await;

        assert_eq!(overview.total_queries, 0);
        assert_eq!(overview.active_users_24h, 0);
    }

    #[tokio::test]
    async fn test_record_query() {
        let config = DashboardConfig::default();
        let dashboard = DashboardAnalytics::new(config);

        let record = QueryRecord {
            query_id: "test-query-1".to_string(),
            query_type: QueryType::NaturalLanguage,
            execution_time_ms: 150,
            result_count: 5,
            success: true,
            timestamp: Utc::now(),
            error: None,
        };

        dashboard.record_query(record).await;

        let overview = dashboard.get_overview().await;
        assert_eq!(overview.total_queries, 1);
        assert_eq!(overview.successful_queries, 1);
    }

    #[tokio::test]
    async fn test_time_range() {
        let now = Utc::now();
        let range = TimeRange {
            start: now - Duration::hours(24),
            end: now + Duration::hours(1),
        };

        assert!(range.contains(now));
        assert!(!range.contains(now - Duration::days(2)));
    }

    #[test]
    fn test_percentile_calculation() {
        let values = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0];
        let p95 = DashboardAnalytics::calculate_percentile(&values, 0.95);
        assert!(p95 >= 9.0);
    }

    #[tokio::test]
    async fn test_csv_export_with_data() {
        let config = DashboardConfig::default();
        let dashboard = DashboardAnalytics::new(config);

        dashboard
            .record_query(QueryRecord {
                query_id: "csv_test".to_string(),
                query_type: QueryType::VectorSearch,
                execution_time_ms: 75,
                result_count: 20,
                success: true,
                timestamp: Utc::now(),
                error: None,
            })
            .await;

        let time_range = TimeRange::last_hours(24);
        let csv_data = dashboard
            .export_data(ExportFormat::Csv, time_range)
            .await
            .unwrap();

        let csv_str = String::from_utf8(csv_data).unwrap();
        assert!(csv_str.contains("=== QUERY ANALYTICS ==="));
        assert!(csv_str.contains("Total Queries,1"));
    }

    #[tokio::test]
    async fn test_excel_export_with_data() {
        let config = DashboardConfig::default();
        let dashboard = DashboardAnalytics::new(config);

        for i in 0..3 {
            dashboard
                .record_query(QueryRecord {
                    query_id: format!("excel_{}", i),
                    query_type: QueryType::Sparql,
                    execution_time_ms: 100,
                    result_count: 10,
                    success: true,
                    timestamp: Utc::now(),
                    error: None,
                })
                .await;
        }

        let time_range = TimeRange::last_days(1);
        let excel_data = dashboard
            .export_data(ExportFormat::Excel, time_range)
            .await
            .unwrap();

        assert!(!excel_data.is_empty());
        // XLSX files are ZIP archives, so the buffer should start with the "PK" magic bytes.
        assert_eq!(&excel_data[0..2], b"PK");
    }

    #[tokio::test]
    async fn test_rps_calculation() {
        let config = DashboardConfig::default();
        let dashboard = DashboardAnalytics::new(config);

        for _ in 0..5 {
            dashboard
                .record_query(QueryRecord {
                    query_id: format!("rps_{}", fastrand::u32(..)),
                    query_type: QueryType::Hybrid,
                    execution_time_ms: 50,
                    result_count: 5,
                    success: true,
                    timestamp: Utc::now(),
                    error: None,
                })
                .await;
        }

        dashboard.update_health(45.0, 500.0, 8).await;

        let health = dashboard
            .get_health_analytics(TimeRange::last_hours(1))
            .await;
        assert!(!health.health_timeline.is_empty());
    }
}