forge_runtime/observability/
alerts.rs

//! Alert storage and evaluation engine.
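//!
//! Rules are persisted through [`AlertStore`]; [`AlertEvaluator`] periodically compares the
//! latest sample of each enabled rule's metric against its threshold and opens, re-opens
//! (after a cooldown), or resolves alerts accordingly.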

use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use sqlx::postgres::PgRow;
use tokio::sync::RwLock;
use uuid::Uuid;

use forge_core::Result;

/// Alert severity levels.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AlertSeverity {
    Info,
    Warning,
    Critical,
}

impl std::fmt::Display for AlertSeverity {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            AlertSeverity::Info => write!(f, "info"),
            AlertSeverity::Warning => write!(f, "warning"),
            AlertSeverity::Critical => write!(f, "critical"),
        }
    }
}

impl std::str::FromStr for AlertSeverity {
    type Err = String;

    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "info" => Ok(AlertSeverity::Info),
            "warning" => Ok(AlertSeverity::Warning),
            "critical" => Ok(AlertSeverity::Critical),
            _ => Err(format!("Unknown severity: {}", s)),
        }
    }
}

/// Alert condition operators.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AlertCondition {
    /// Greater than
    Gt,
    /// Greater than or equal
    Gte,
    /// Less than
    Lt,
    /// Less than or equal
    Lte,
    /// Equal
    Eq,
    /// Not equal
    Ne,
}

impl AlertCondition {
    /// Evaluate the condition.
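    ///
    /// `Eq` and `Ne` use an absolute `f64::EPSILON` tolerance rather than exact
    /// floating-point equality.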
    pub fn evaluate(&self, value: f64, threshold: f64) -> bool {
        match self {
            AlertCondition::Gt => value > threshold,
            AlertCondition::Gte => value >= threshold,
            AlertCondition::Lt => value < threshold,
            AlertCondition::Lte => value <= threshold,
            AlertCondition::Eq => (value - threshold).abs() < f64::EPSILON,
            AlertCondition::Ne => (value - threshold).abs() >= f64::EPSILON,
        }
    }
}

impl std::fmt::Display for AlertCondition {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            AlertCondition::Gt => write!(f, "gt"),
            AlertCondition::Gte => write!(f, "gte"),
            AlertCondition::Lt => write!(f, "lt"),
            AlertCondition::Lte => write!(f, "lte"),
            AlertCondition::Eq => write!(f, "eq"),
            AlertCondition::Ne => write!(f, "ne"),
        }
    }
}

impl std::str::FromStr for AlertCondition {
    type Err = String;

    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "gt" | ">" => Ok(AlertCondition::Gt),
            "gte" | ">=" => Ok(AlertCondition::Gte),
            "lt" | "<" => Ok(AlertCondition::Lt),
            "lte" | "<=" => Ok(AlertCondition::Lte),
            "eq" | "==" => Ok(AlertCondition::Eq),
            "ne" | "!=" => Ok(AlertCondition::Ne),
            _ => Err(format!("Unknown condition: {}", s)),
        }
    }
}

/// Alert status.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AlertStatus {
    Firing,
    Resolved,
}

impl std::fmt::Display for AlertStatus {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            AlertStatus::Firing => write!(f, "firing"),
            AlertStatus::Resolved => write!(f, "resolved"),
        }
    }
}

/// Alert rule definition.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AlertRule {
    pub id: Uuid,
    pub name: String,
    pub description: Option<String>,
    pub metric_name: String,
    pub condition: AlertCondition,
    pub threshold: f64,
    pub duration_seconds: i32,
    pub severity: AlertSeverity,
    pub enabled: bool,
    pub labels: HashMap<String, String>,
    pub notification_channels: Vec<String>,
    pub cooldown_seconds: i32,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

impl AlertRule {
    /// Create a new alert rule.
    pub fn new(
        name: impl Into<String>,
        metric_name: impl Into<String>,
        condition: AlertCondition,
        threshold: f64,
    ) -> Self {
        let now = Utc::now();
        Self {
            id: Uuid::new_v4(),
            name: name.into(),
            description: None,
            metric_name: metric_name.into(),
            condition,
            threshold,
            duration_seconds: 0,
            severity: AlertSeverity::Warning,
            enabled: true,
            labels: HashMap::new(),
            notification_channels: Vec::new(),
            cooldown_seconds: 300,
            created_at: now,
            updated_at: now,
        }
    }

    /// Set description.
    pub fn with_description(mut self, description: impl Into<String>) -> Self {
        self.description = Some(description.into());
        self
    }

    /// Set severity.
    pub fn with_severity(mut self, severity: AlertSeverity) -> Self {
        self.severity = severity;
        self
    }

    /// Set duration (seconds that condition must be true).
    pub fn with_duration(mut self, seconds: i32) -> Self {
        self.duration_seconds = seconds;
        self
    }
}

/// A fired alert.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Alert {
    pub id: Uuid,
    pub rule_id: Uuid,
    pub rule_name: String,
    pub metric_value: f64,
    pub threshold: f64,
    pub severity: AlertSeverity,
    pub status: AlertStatus,
    pub triggered_at: DateTime<Utc>,
    pub resolved_at: Option<DateTime<Utc>>,
    pub acknowledged_at: Option<DateTime<Utc>>,
    pub acknowledged_by: Option<String>,
    pub labels: HashMap<String, String>,
    pub annotations: HashMap<String, String>,
}

impl Alert {
    /// Create a new firing alert.
    pub fn firing(rule: &AlertRule, metric_value: f64) -> Self {
        Self {
            id: Uuid::new_v4(),
            rule_id: rule.id,
            rule_name: rule.name.clone(),
            metric_value,
            threshold: rule.threshold,
            severity: rule.severity,
            status: AlertStatus::Firing,
            triggered_at: Utc::now(),
            resolved_at: None,
            acknowledged_at: None,
            acknowledged_by: None,
            labels: rule.labels.clone(),
            annotations: HashMap::new(),
        }
    }
}

/// Alert store for persistence.
pub struct AlertStore {
    pool: sqlx::PgPool,
}

impl AlertStore {
    /// Create a new alert store.
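    ///
    /// A usage sketch (the pool and the example rule values are illustrative only):
    ///
    /// ```ignore
    /// let store = AlertStore::new(pool.clone());
    /// let rule = AlertRule::new("high_error_rate", "http_errors_total", AlertCondition::Gt, 100.0)
    ///     .with_severity(AlertSeverity::Critical)
    ///     .with_duration(120);
    /// store.create_rule(&rule).await?;
    /// ```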
    pub fn new(pool: sqlx::PgPool) -> Self {
        Self { pool }
    }

    // ==================== Alert Rules ====================

    /// Create an alert rule.
    pub async fn create_rule(&self, rule: &AlertRule) -> Result<()> {
        let labels = serde_json::to_value(&rule.labels).unwrap_or_default();

        sqlx::query(
            r#"
            INSERT INTO forge_alert_rules
            (id, name, description, metric_name, condition, threshold, duration_seconds,
             severity, enabled, labels, notification_channels, cooldown_seconds,
             created_at, updated_at)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
            "#,
        )
        .bind(rule.id)
        .bind(&rule.name)
        .bind(&rule.description)
        .bind(&rule.metric_name)
        .bind(rule.condition.to_string())
        .bind(rule.threshold)
        .bind(rule.duration_seconds)
        .bind(rule.severity.to_string())
        .bind(rule.enabled)
        .bind(labels)
        .bind(&rule.notification_channels)
        .bind(rule.cooldown_seconds)
        .bind(rule.created_at)
        .bind(rule.updated_at)
        .execute(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(())
    }

    /// List all alert rules.
    pub async fn list_rules(&self) -> Result<Vec<AlertRule>> {
        let rows = sqlx::query(
            r#"
            SELECT id, name, description, metric_name, condition, threshold,
                   duration_seconds, severity, enabled, labels, notification_channels,
                   cooldown_seconds, created_at, updated_at
            FROM forge_alert_rules
            ORDER BY name
            "#,
        )
        .fetch_all(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(rows.into_iter().map(parse_alert_rule_row).collect())
    }

    /// List enabled alert rules.
    pub async fn list_enabled_rules(&self) -> Result<Vec<AlertRule>> {
        let rows = sqlx::query(
            r#"
            SELECT id, name, description, metric_name, condition, threshold,
                   duration_seconds, severity, enabled, labels, notification_channels,
                   cooldown_seconds, created_at, updated_at
            FROM forge_alert_rules
            WHERE enabled = TRUE
            ORDER BY name
            "#,
        )
        .fetch_all(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(rows.into_iter().map(parse_alert_rule_row).collect())
    }

    /// Get a rule by ID.
    pub async fn get_rule(&self, id: Uuid) -> Result<Option<AlertRule>> {
        let row = sqlx::query(
            r#"
            SELECT id, name, description, metric_name, condition, threshold,
                   duration_seconds, severity, enabled, labels, notification_channels,
                   cooldown_seconds, created_at, updated_at
            FROM forge_alert_rules
            WHERE id = $1
            "#,
        )
        .bind(id)
        .fetch_optional(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(row.map(parse_alert_rule_row))
    }

    /// Update an alert rule.
    pub async fn update_rule(&self, rule: &AlertRule) -> Result<()> {
        let labels = serde_json::to_value(&rule.labels).unwrap_or_default();

        sqlx::query(
            r#"
            UPDATE forge_alert_rules
            SET name = $2, description = $3, metric_name = $4, condition = $5,
                threshold = $6, duration_seconds = $7, severity = $8, enabled = $9,
                labels = $10, notification_channels = $11, cooldown_seconds = $12,
                updated_at = NOW()
            WHERE id = $1
            "#,
        )
        .bind(rule.id)
        .bind(&rule.name)
        .bind(&rule.description)
        .bind(&rule.metric_name)
        .bind(rule.condition.to_string())
        .bind(rule.threshold)
        .bind(rule.duration_seconds)
        .bind(rule.severity.to_string())
        .bind(rule.enabled)
        .bind(labels)
        .bind(&rule.notification_channels)
        .bind(rule.cooldown_seconds)
        .execute(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(())
    }

    /// Delete an alert rule.
    pub async fn delete_rule(&self, id: Uuid) -> Result<()> {
        sqlx::query("DELETE FROM forge_alert_rules WHERE id = $1")
            .bind(id)
            .execute(&self.pool)
            .await
            .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(())
    }

    // ==================== Alerts ====================

    /// Create an alert.
    pub async fn create_alert(&self, alert: &Alert) -> Result<()> {
        let labels = serde_json::to_value(&alert.labels).unwrap_or_default();
        let annotations = serde_json::to_value(&alert.annotations).unwrap_or_default();

        sqlx::query(
            r#"
            INSERT INTO forge_alerts
            (id, rule_id, rule_name, metric_value, threshold, severity, status,
             triggered_at, resolved_at, acknowledged_at, acknowledged_by, labels, annotations)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
            "#,
        )
        .bind(alert.id)
        .bind(alert.rule_id)
        .bind(&alert.rule_name)
        .bind(alert.metric_value)
        .bind(alert.threshold)
        .bind(alert.severity.to_string())
        .bind(alert.status.to_string())
        .bind(alert.triggered_at)
        .bind(alert.resolved_at)
        .bind(alert.acknowledged_at)
        .bind(&alert.acknowledged_by)
        .bind(labels)
        .bind(annotations)
        .execute(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(())
    }

    /// List active (firing) alerts.
    pub async fn list_active_alerts(&self) -> Result<Vec<Alert>> {
        let rows = sqlx::query(
            r#"
            SELECT id, rule_id, rule_name, metric_value, threshold, severity, status,
                   triggered_at, resolved_at, acknowledged_at, acknowledged_by, labels, annotations
            FROM forge_alerts
            WHERE status = 'firing'
            ORDER BY triggered_at DESC
            "#,
        )
        .fetch_all(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(rows.into_iter().map(parse_alert_row).collect())
    }

    /// List recent alerts (both firing and resolved).
    pub async fn list_recent_alerts(&self, limit: i64) -> Result<Vec<Alert>> {
        let rows = sqlx::query(
            r#"
            SELECT id, rule_id, rule_name, metric_value, threshold, severity, status,
                   triggered_at, resolved_at, acknowledged_at, acknowledged_by, labels, annotations
            FROM forge_alerts
            ORDER BY triggered_at DESC
            LIMIT $1
            "#,
        )
        .bind(limit)
        .fetch_all(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(rows.into_iter().map(parse_alert_row).collect())
    }

    /// Resolve an alert.
    pub async fn resolve_alert(&self, id: Uuid) -> Result<()> {
        sqlx::query(
            r#"
            UPDATE forge_alerts
            SET status = 'resolved', resolved_at = NOW()
            WHERE id = $1
            "#,
        )
        .bind(id)
        .execute(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(())
    }

    /// Acknowledge an alert.
    pub async fn acknowledge_alert(&self, id: Uuid, by: &str) -> Result<()> {
        sqlx::query(
            r#"
            UPDATE forge_alerts
            SET acknowledged_at = NOW(), acknowledged_by = $2
            WHERE id = $1
            "#,
        )
        .bind(id)
        .bind(by)
        .execute(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(())
    }

    /// Get last alert for a rule (to check cooldown).
    pub async fn get_last_alert_for_rule(&self, rule_id: Uuid) -> Result<Option<Alert>> {
        let row = sqlx::query(
            r#"
            SELECT id, rule_id, rule_name, metric_value, threshold, severity, status,
                   triggered_at, resolved_at, acknowledged_at, acknowledged_by, labels, annotations
            FROM forge_alerts
            WHERE rule_id = $1
            ORDER BY triggered_at DESC
            LIMIT 1
            "#,
        )
        .bind(rule_id)
        .fetch_optional(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(row.map(parse_alert_row))
    }

    /// Cleanup old resolved alerts.
    pub async fn cleanup(&self, retention: Duration) -> Result<u64> {
        let cutoff = Utc::now() - chrono::Duration::from_std(retention).unwrap_or_default();

        let result = sqlx::query(
            r#"
            DELETE FROM forge_alerts
            WHERE status = 'resolved' AND resolved_at < $1
            "#,
        )
        .bind(cutoff)
        .execute(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(result.rows_affected())
    }
}

/// Alert evaluator that periodically checks rules against metrics.
pub struct AlertEvaluator {
    alert_store: Arc<AlertStore>,
    #[allow(dead_code)]
    metrics_store: Arc<super::MetricsStore>,
    pool: sqlx::PgPool,
    shutdown: Arc<RwLock<bool>>,
}

impl AlertEvaluator {
    /// Create a new alert evaluator.
    pub fn new(
        alert_store: Arc<AlertStore>,
        metrics_store: Arc<super::MetricsStore>,
        pool: sqlx::PgPool,
    ) -> Self {
        Self {
            alert_store,
            metrics_store,
            pool,
            shutdown: Arc::new(RwLock::new(false)),
        }
    }

    /// Start the evaluation loop.
    ///
    /// This runs in the background and evaluates all enabled rules every `interval`.
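    ///
    /// A usage sketch (the stores, pool, and interval are illustrative; wiring is up to the
    /// caller):
    ///
    /// ```ignore
    /// let evaluator = Arc::new(AlertEvaluator::new(alert_store, metrics_store, pool));
    /// let handle = {
    ///     let evaluator = evaluator.clone();
    ///     tokio::spawn(async move { evaluator.run(Duration::from_secs(30)).await })
    /// };
    /// // ... later, on shutdown:
    /// evaluator.stop().await;
    /// handle.await.ok();
    /// ```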
    pub async fn run(&self, interval: Duration) {
        tracing::info!("Alert evaluator started");

        let mut ticker = tokio::time::interval(interval);
        loop {
            ticker.tick().await;

            if *self.shutdown.read().await {
                break;
            }

            if let Err(e) = self.evaluate_all_rules().await {
                tracing::error!("Alert evaluation error: {}", e);
            }
        }

        tracing::info!("Alert evaluator stopped");
    }

    /// Stop the evaluator.
    pub async fn stop(&self) {
        let mut shutdown = self.shutdown.write().await;
        *shutdown = true;
    }

    /// Evaluate all enabled rules.
    async fn evaluate_all_rules(&self) -> Result<()> {
        let rules = self.alert_store.list_enabled_rules().await?;

        for rule in rules {
            if let Err(e) = self.evaluate_rule(&rule).await {
                tracing::warn!("Failed to evaluate rule {}: {}", rule.name, e);
            }
        }

        Ok(())
    }

    /// Evaluate a single rule.
    async fn evaluate_rule(&self, rule: &AlertRule) -> Result<()> {
        // Get the latest metric value for this rule
        let metric_value = self
            .get_latest_metric_value(&rule.metric_name, &rule.labels)
            .await?;

        let metric_value = match metric_value {
            Some(v) => v,
            None => return Ok(()), // No metric data yet
        };

        // Evaluate the condition
        let condition_met = rule.condition.evaluate(metric_value, rule.threshold);

        // Check if there's an existing firing alert for this rule
        let existing_alert = self.alert_store.get_last_alert_for_rule(rule.id).await?;

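        // State transitions handled by the match below:
        //   condition met,     no prior alert           -> open a new alert
        //   condition met,     prior alert resolved     -> re-open only after the cooldown window
        //   condition cleared, prior alert still firing -> resolve it
        //   anything else                               -> no change
        // Note: `duration_seconds` is not consulted here; a single out-of-range sample fires the rule.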
        match (condition_met, existing_alert) {
            (true, None) => {
                // Condition met and no existing alert - create new alert
                let alert = Alert::firing(rule, metric_value);
                self.alert_store.create_alert(&alert).await?;
                tracing::warn!(
                    rule = rule.name,
                    value = metric_value,
                    threshold = rule.threshold,
                    severity = ?rule.severity,
                    "Alert triggered"
                );
            }
            (true, Some(existing)) if existing.status == AlertStatus::Resolved => {
                // Condition met again after resolution - check cooldown
                let cooldown = chrono::Duration::seconds(rule.cooldown_seconds as i64);
                let since_resolved = existing
                    .resolved_at
                    .map(|t| Utc::now() - t)
                    .unwrap_or(cooldown);

                if since_resolved >= cooldown {
                    // Cooldown passed - create new alert
                    let alert = Alert::firing(rule, metric_value);
                    self.alert_store.create_alert(&alert).await?;
                    tracing::warn!(
                        rule = rule.name,
                        value = metric_value,
                        threshold = rule.threshold,
                        "Alert re-triggered after cooldown"
                    );
                }
            }
            (false, Some(existing)) if existing.status == AlertStatus::Firing => {
                // Condition no longer met - resolve alert
                self.alert_store.resolve_alert(existing.id).await?;
                tracing::info!(rule = rule.name, value = metric_value, "Alert resolved");
            }
            _ => {
                // No action needed
            }
        }

        Ok(())
    }

    /// Get the latest value recorded for the rule's metric (label filtering is not applied yet).
    async fn get_latest_metric_value(
        &self,
        metric_name: &str,
        _labels: &HashMap<String, String>,
    ) -> Result<Option<f64>> {
        // Query the latest metric value
        let row: Option<(f64,)> = sqlx::query_as(
            r#"
            SELECT value
            FROM forge_metrics
            WHERE name = $1
            ORDER BY timestamp DESC
            LIMIT 1
            "#,
        )
        .bind(metric_name)
        .fetch_optional(&self.pool)
        .await
        .map_err(|e| forge_core::ForgeError::Database(e.to_string()))?;

        Ok(row.map(|(v,)| v))
    }
}

// Manual row parsing functions
fn parse_alert_rule_row(row: PgRow) -> AlertRule {
    let labels_json: serde_json::Value = row.get("labels");
    let labels: HashMap<String, String> = serde_json::from_value(labels_json).unwrap_or_default();
    let condition_str: String = row.get("condition");
    let severity_str: String = row.get("severity");

    AlertRule {
        id: row.get("id"),
        name: row.get("name"),
        description: row.get("description"),
        metric_name: row.get("metric_name"),
        condition: condition_str.parse().unwrap_or(AlertCondition::Gt),
        threshold: row.get("threshold"),
        duration_seconds: row.get("duration_seconds"),
        severity: severity_str.parse().unwrap_or(AlertSeverity::Warning),
        enabled: row.get("enabled"),
        labels,
        notification_channels: row.get("notification_channels"),
        cooldown_seconds: row.get("cooldown_seconds"),
        created_at: row.get("created_at"),
        updated_at: row.get("updated_at"),
    }
}

fn parse_alert_row(row: PgRow) -> Alert {
    let labels_json: serde_json::Value = row.get("labels");
    let annotations_json: serde_json::Value = row.get("annotations");
    let labels: HashMap<String, String> = serde_json::from_value(labels_json).unwrap_or_default();
    let annotations: HashMap<String, String> =
        serde_json::from_value(annotations_json).unwrap_or_default();
    let severity_str: String = row.get("severity");
    let status_str: String = row.get("status");

    Alert {
        id: row.get("id"),
        rule_id: row.get("rule_id"),
        rule_name: row.get("rule_name"),
        metric_value: row.get("metric_value"),
        threshold: row.get("threshold"),
        severity: severity_str.parse().unwrap_or(AlertSeverity::Warning),
        status: if status_str == "firing" {
            AlertStatus::Firing
        } else {
            AlertStatus::Resolved
        },
        triggered_at: row.get("triggered_at"),
        resolved_at: row.get("resolved_at"),
        acknowledged_at: row.get("acknowledged_at"),
        acknowledged_by: row.get("acknowledged_by"),
        labels,
        annotations,
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_alert_condition_evaluate() {
        assert!(AlertCondition::Gt.evaluate(10.0, 5.0));
        assert!(!AlertCondition::Gt.evaluate(5.0, 10.0));

        assert!(AlertCondition::Gte.evaluate(10.0, 10.0));
        assert!(AlertCondition::Gte.evaluate(10.0, 5.0));

        assert!(AlertCondition::Lt.evaluate(5.0, 10.0));
        assert!(!AlertCondition::Lt.evaluate(10.0, 5.0));

        assert!(AlertCondition::Lte.evaluate(10.0, 10.0));
        assert!(AlertCondition::Lte.evaluate(5.0, 10.0));

        assert!(AlertCondition::Eq.evaluate(10.0, 10.0));
        assert!(!AlertCondition::Eq.evaluate(10.0, 5.0));

        assert!(AlertCondition::Ne.evaluate(10.0, 5.0));
        assert!(!AlertCondition::Ne.evaluate(10.0, 10.0));
    }
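
    // Covers the string/symbol parsing paths; these only touch in-memory types.
    #[test]
    fn test_condition_and_severity_parsing() {
        assert_eq!("gt".parse::<AlertCondition>(), Ok(AlertCondition::Gt));
        assert_eq!(">=".parse::<AlertCondition>(), Ok(AlertCondition::Gte));
        assert_eq!("!=".parse::<AlertCondition>(), Ok(AlertCondition::Ne));
        assert!("bogus".parse::<AlertCondition>().is_err());

        assert_eq!("CRITICAL".parse::<AlertSeverity>(), Ok(AlertSeverity::Critical));
        assert_eq!(AlertSeverity::Warning.to_string(), "warning");
    }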

    #[test]
    fn test_alert_rule_builder() {
        let rule = AlertRule::new("high_cpu", "cpu_usage_percent", AlertCondition::Gt, 90.0)
            .with_description("Alert when CPU usage exceeds 90%")
            .with_severity(AlertSeverity::Critical)
            .with_duration(60);

        assert_eq!(rule.name, "high_cpu");
        assert_eq!(rule.metric_name, "cpu_usage_percent");
        assert_eq!(rule.threshold, 90.0);
        assert_eq!(rule.severity, AlertSeverity::Critical);
        assert_eq!(rule.duration_seconds, 60);
    }

    #[test]
    fn test_alert_firing() {
        let rule = AlertRule::new("test", "metric", AlertCondition::Gt, 50.0);
        let alert = Alert::firing(&rule, 75.0);

        assert_eq!(alert.rule_name, "test");
        assert_eq!(alert.metric_value, 75.0);
        assert_eq!(alert.threshold, 50.0);
        assert_eq!(alert.status, AlertStatus::Firing);
    }
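
    // Confirms the `rename_all = "lowercase"` attribute round-trips through serde_json.
    #[test]
    fn test_severity_serde_lowercase() {
        let json = serde_json::to_string(&AlertSeverity::Critical).unwrap();
        assert_eq!(json, "\"critical\"");

        let parsed: AlertSeverity = serde_json::from_str("\"warning\"").unwrap();
        assert_eq!(parsed, AlertSeverity::Warning);
    }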
}