use super::{SecurityEvent, SecurityEventSeverity, SecurityEventType};
use crate::errors::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
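
/// Configuration for alert generation and delivery.
///
/// `AlertConfig` derives `Serialize`/`Deserialize`, so it can be loaded
/// from a config file. A minimal JSON sketch (values illustrative, using
/// serde's default externally tagged representation for the channels):
///
/// ```ignore
/// {
///   "enabled": true,
///   "thresholds": {
///     "failed_logins_per_minute": 10,
///     "max_response_time_ms": 5000,
///     "error_rate_threshold": 0.1,
///     "alert_cooldown_seconds": 300
///   },
///   "channels": [{ "Log": { "level": "warn" } }]
/// }
/// ```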
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AlertConfig {
    /// Master switch; when false, no alerts are generated or delivered.
    pub enabled: bool,
    /// Thresholds that determine when alerts fire.
    pub thresholds: AlertThresholds,
    /// Channels that alerts are delivered to.
    pub channels: Vec<NotificationChannel>,
}

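/// Numeric thresholds controlling when alerts fire and how often an
/// alert of the same kind may repeat.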
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AlertThresholds {
    /// Failed login attempts per minute before an alert fires.
    pub failed_logins_per_minute: u64,
    /// Maximum acceptable average response time, in milliseconds.
    pub max_response_time_ms: u64,
    /// Authentication failure rate (0.0 to 1.0) above which an alert fires.
    pub error_rate_threshold: f64,
    /// Minimum number of seconds between repeated alerts of the same kind.
    pub alert_cooldown_seconds: u64,
}

impl Default for AlertThresholds {
    fn default() -> Self {
        Self {
            failed_logins_per_minute: 10,
            max_response_time_ms: 5000,
            error_rate_threshold: 0.1,
            alert_cooldown_seconds: 300,
        }
    }
}

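/// A destination that alerts can be delivered to.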
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum NotificationChannel {
    /// Send alert emails over SMTP to the listed recipients.
    Email { recipients: Vec<String> },
    /// Post to a Slack incoming webhook.
    Slack { webhook_url: String },
    /// Post to a Microsoft Teams incoming webhook.
    Teams { webhook_url: String },
    /// POST the alert as JSON to an arbitrary HTTP endpoint.
    Webhook {
        url: String,
        headers: HashMap<String, String>,
    },
    /// Emit the alert through `tracing` at the given level.
    Log { level: String },
}

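/// Severity of an alert. The derived `PartialOrd` follows declaration
/// order, so `Critical` compares greater than `Warning` and `Info`.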
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, PartialOrd)]
pub enum AlertSeverity {
    Info,
    Warning,
    Critical,
}

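/// A single alert, ready for delivery to the configured channels.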
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Alert {
    /// Unique identifier, composed of the alert kind and a timestamp.
    pub id: String,
    /// Short human-readable title.
    pub title: String,
    /// Detailed description of what triggered the alert.
    pub message: String,
    /// How urgent the alert is.
    pub severity: AlertSeverity,
    /// Subsystem that produced the alert (e.g. "authentication").
    pub source: String,
    /// Numeric metrics relevant to the alert, keyed by name.
    pub metrics: HashMap<String, f64>,
    /// Creation time, as returned by `crate::monitoring::current_timestamp()`
    /// or taken from the triggering event.
    pub timestamp: u64,
}

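/// Generates alerts from security events and performance metrics,
/// suppresses repeats within the configured cooldown window, and fans
/// alerts out to every configured notification channel.
///
/// Minimal usage sketch (assumes a Tokio runtime and a `SecurityEvent`
/// built elsewhere):
///
/// ```ignore
/// let mut manager = AlertManager::new(AlertConfig::default());
/// manager.process_security_event(&event).await?;
/// ```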
pub struct AlertManager {
    config: AlertConfig,
    /// Maps alert kind (source + title) to the timestamp of the last alert
    /// sent, for cooldown suppression.
    recent_alerts: HashMap<String, u64>,
}

impl AlertManager {
    /// Creates a manager with the given configuration and an empty alert
    /// history.
    pub fn new(config: AlertConfig) -> Self {
        Self {
            config,
            recent_alerts: HashMap::new(),
        }
    }

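    /// Maps a security event to an alert where the event type warrants
    /// one, then sends it. Returns early when alerting is disabled.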
    pub async fn process_security_event(&mut self, event: &SecurityEvent) -> Result<()> {
        if !self.config.enabled {
            return Ok(());
        }

        let alert = match event.event_type {
            SecurityEventType::FailedLogin => {
                if event.severity >= SecurityEventSeverity::High {
                    Some(Alert {
                        id: format!("failed_login_{}", event.timestamp),
                        title: "High volume of failed login attempts detected".to_string(),
                        message: format!(
                            "Multiple failed login attempts detected for user {:?} from IP {:?}",
                            event.user_id, event.ip_address
                        ),
                        severity: AlertSeverity::Warning,
                        source: "authentication".to_string(),
                        metrics: HashMap::new(),
                        timestamp: event.timestamp,
                    })
                } else {
                    None
                }
            }
            SecurityEventType::AccountLockout => Some(Alert {
                id: format!("account_lockout_{}", event.timestamp),
                title: "Account lockout triggered".to_string(),
                message: format!(
                    "Account {:?} has been locked due to security policy",
                    event.user_id
                ),
                severity: AlertSeverity::Warning,
                source: "security".to_string(),
                metrics: HashMap::new(),
                timestamp: event.timestamp,
            }),
            SecurityEventType::PrivilegeEscalation => Some(Alert {
                id: format!("privilege_escalation_{}", event.timestamp),
                title: "Privilege escalation attempt detected".to_string(),
                message: format!("Privilege escalation attempt by user {:?}", event.user_id),
                severity: AlertSeverity::Critical,
                source: "authorization".to_string(),
                metrics: HashMap::new(),
                timestamp: event.timestamp,
            }),
            SecurityEventType::UnusualActivity => Some(Alert {
                id: format!("unusual_activity_{}", event.timestamp),
                title: "Unusual activity detected".to_string(),
                message: format!(
                    "Unusual activity pattern for user {:?}: {:?}",
                    event.user_id, event.details
                ),
                severity: AlertSeverity::Warning,
                source: "security".to_string(),
                metrics: HashMap::new(),
                timestamp: event.timestamp,
            }),
            SecurityEventType::TokenManipulation => Some(Alert {
                id: format!("token_manipulation_{}", event.timestamp),
                title: "Token manipulation attempt".to_string(),
                message: format!(
                    "Token manipulation detected for user {:?}: {:?}",
                    event.user_id, event.details
                ),
                severity: AlertSeverity::Critical,
                source: "security".to_string(),
                metrics: HashMap::new(),
                timestamp: event.timestamp,
            }),
            SecurityEventType::ConfigurationChange => Some(Alert {
                id: format!("config_change_{}", event.timestamp),
                title: "Security configuration changed".to_string(),
                message: format!(
                    "Configuration change by user {:?}: {:?}",
                    event.user_id, event.details
                ),
                severity: AlertSeverity::Info,
                source: "configuration".to_string(),
                metrics: HashMap::new(),
                timestamp: event.timestamp,
            }),
            SecurityEventType::SystemError => Some(Alert {
                id: format!("system_error_{}", event.timestamp),
                title: "System error in security subsystem".to_string(),
                message: format!("System error: {:?}", event.details),
                severity: AlertSeverity::Warning,
                source: "system".to_string(),
                metrics: HashMap::new(),
                timestamp: event.timestamp,
            }),
        };

        if let Some(alert) = alert {
            self.send_alert(alert).await?;
        }

        Ok(())
    }

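    /// Checks aggregated performance metrics against the configured
    /// thresholds, alerting on slow average response times and high
    /// authentication error rates.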
    pub async fn process_performance_metrics(
        &mut self,
        metrics: &HashMap<String, u64>,
    ) -> Result<()> {
        if !self.config.enabled {
            return Ok(());
        }

        if let Some(&response_time) = metrics.get("avg_response_time_us") {
            // Metrics report microseconds; the threshold is in milliseconds.
            let response_time_ms = response_time / 1000;
            if response_time_ms > self.config.thresholds.max_response_time_ms {
                let alert = Alert {
                    id: format!(
                        "high_response_time_{}",
                        crate::monitoring::current_timestamp()
                    ),
                    title: "High response time detected".to_string(),
                    message: format!(
                        "Average response time is {}ms, which exceeds the threshold of {}ms",
                        response_time_ms, self.config.thresholds.max_response_time_ms
                    ),
                    severity: AlertSeverity::Warning,
                    source: "performance".to_string(),
                    metrics: {
                        let mut m = HashMap::new();
                        m.insert("response_time_ms".to_string(), response_time_ms as f64);
                        m.insert(
                            "threshold_ms".to_string(),
                            self.config.thresholds.max_response_time_ms as f64,
                        );
                        m
                    },
                    timestamp: crate::monitoring::current_timestamp(),
                };

                self.send_alert(alert).await?;
            }
        }

        if let (Some(&auth_requests), Some(&auth_failures)) =
            (metrics.get("auth_requests"), metrics.get("auth_failures"))
            && auth_requests > 0
        {
            let error_rate = auth_failures as f64 / auth_requests as f64;

            if error_rate > self.config.thresholds.error_rate_threshold {
                let alert = Alert {
                    id: format!("high_error_rate_{}", crate::monitoring::current_timestamp()),
                    title: "High authentication error rate".to_string(),
                    message: format!(
                        "Authentication error rate is {:.1}%, which exceeds the threshold of {:.1}%",
                        error_rate * 100.0,
                        self.config.thresholds.error_rate_threshold * 100.0
                    ),
                    severity: AlertSeverity::Critical,
                    source: "authentication".to_string(),
                    metrics: {
                        let mut m = HashMap::new();
                        m.insert("error_rate".to_string(), error_rate);
                        m.insert(
                            "threshold".to_string(),
                            self.config.thresholds.error_rate_threshold,
                        );
                        m.insert("total_requests".to_string(), auth_requests as f64);
                        m.insert("failed_requests".to_string(), auth_failures as f64);
                        m
                    },
                    timestamp: crate::monitoring::current_timestamp(),
                };

                self.send_alert(alert).await?;
            }
        }

        Ok(())
    }

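    /// Applies the per-kind cooldown, records the alert, and delivers it
    /// to every configured channel.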
    async fn send_alert(&mut self, alert: Alert) -> Result<()> {
        // Cooldown is keyed by alert kind (source + title) rather than by
        // `alert.id`: ids embed a timestamp, so keying on the id would
        // almost never suppress a repeat.
        let cooldown_key = format!("{}:{}", alert.source, alert.title);
        if let Some(&last_alert_time) = self.recent_alerts.get(&cooldown_key) {
            let current_time = crate::monitoring::current_timestamp();
            if current_time.saturating_sub(last_alert_time)
                < self.config.thresholds.alert_cooldown_seconds
            {
                tracing::debug!("Alert {} is in cooldown period, skipping", alert.id);
                return Ok(());
            }
        }

        self.recent_alerts.insert(cooldown_key, alert.timestamp);

        for channel in &self.config.channels {
            self.send_to_channel(&alert, channel).await?;
        }

        tracing::info!(
            "Alert sent: {} - {} (Severity: {:?})",
            alert.title,
            alert.message,
            alert.severity
        );

        Ok(())
    }

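    /// Delivers one alert to one channel. Delivery failures are logged
    /// rather than propagated, so a broken channel does not block the
    /// remaining channels.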
    async fn send_to_channel(&self, alert: &Alert, channel: &NotificationChannel) -> Result<()> {
        match channel {
            NotificationChannel::Email { recipients } => {
                use lettre::{
                    AsyncSmtpTransport, AsyncTransport, Message, Tokio1Executor, message::Mailbox,
                    transport::smtp::authentication::Credentials,
                };

                // SMTP settings come from the environment; when AUTH_SMTP_HOST
                // is unset, the alert is logged instead of emailed.
                let smtp_host = match std::env::var("AUTH_SMTP_HOST") {
                    Ok(h) => h,
                    Err(_) => {
                        tracing::warn!(
                            recipients = ?recipients,
                            title = %alert.title,
                            severity = ?alert.severity,
                            "EMAIL ALERT: set AUTH_SMTP_HOST to enable SMTP delivery"
                        );
                        return Ok(());
                    }
                };
                let smtp_port: u16 = std::env::var("AUTH_SMTP_PORT")
                    .ok()
                    .and_then(|p| p.parse().ok())
                    .unwrap_or(587);
                let from_addr = std::env::var("AUTH_SMTP_FROM")
                    .unwrap_or_else(|_| format!("alerts@{}", smtp_host));

                let from_mailbox: Mailbox = match from_addr.parse() {
                    Ok(m) => m,
                    Err(e) => {
                        tracing::error!(from = %from_addr, error = %e, "Invalid AUTH_SMTP_FROM address");
                        return Ok(());
                    }
                };

                let mut builder = match AsyncSmtpTransport::<Tokio1Executor>::starttls_relay(
                    &smtp_host,
                ) {
                    Ok(b) => b.port(smtp_port),
                    Err(e) => {
                        tracing::error!(host = %smtp_host, error = %e, "Failed to create SMTP transport");
                        return Ok(());
                    }
                };
                if let (Ok(user), Ok(pass)) = (
                    std::env::var("AUTH_SMTP_USERNAME"),
                    std::env::var("AUTH_SMTP_PASSWORD"),
                ) {
                    builder = builder.credentials(Credentials::new(user, pass));
                }
                let mailer = builder.build();

                let subject = format!("[{:?}] {}", alert.severity, alert.title);
                let body = format!(
                    "Alert: {}\nSeverity: {:?}\nSource: {}\nMessage: {}\nTimestamp: {}",
                    alert.title, alert.severity, alert.source, alert.message, alert.timestamp
                );

                for recipient in recipients {
                    let to_mailbox: Mailbox = match recipient.parse() {
                        Ok(m) => m,
                        Err(e) => {
                            tracing::error!(
                                recipient = %recipient, error = %e,
                                "Invalid recipient address; skipping"
                            );
                            continue;
                        }
                    };
                    match Message::builder()
                        .from(from_mailbox.clone())
                        .to(to_mailbox)
                        .subject(&subject)
                        .body(body.clone())
                    {
                        Ok(email) => {
                            if let Err(e) = mailer.send(email).await {
                                tracing::error!(
                                    recipient = %recipient, error = %e,
                                    "Failed to send email alert"
                                );
                            } else {
                                tracing::info!(
                                    recipient = %recipient,
                                    "Email alert sent: {}", alert.title
                                );
                            }
                        }
                        Err(e) => {
                            tracing::error!(
                                recipient = %recipient, error = %e,
                                "Failed to build email message"
                            );
                        }
                    }
                }
            }
            NotificationChannel::Slack { webhook_url } => {
                let payload = serde_json::json!({
                    "text": format!(
                        "*[{:?}]* {} - {}",
                        alert.severity, alert.title, alert.message
                    )
                });
                let client = reqwest::Client::new();
                if let Err(e) = client.post(webhook_url).json(&payload).send().await {
                    tracing::error!(
                        webhook_url = %webhook_url,
                        error = %e,
                        "Failed to send Slack alert"
                    );
                } else {
                    tracing::info!(webhook_url = %webhook_url, "Slack alert sent: {}", alert.title);
                }
            }
            NotificationChannel::Teams { webhook_url } => {
                // Legacy Office 365 connector "MessageCard" payload.
                let payload = serde_json::json!({
                    "@type": "MessageCard",
                    "@context": "http://schema.org/extensions",
                    "themeColor": match alert.severity {
                        AlertSeverity::Critical => "FF0000",
                        AlertSeverity::Warning => "FFA500",
                        AlertSeverity::Info => "0078D7",
                    },
                    "summary": &alert.title,
                    "sections": [{
                        "activityTitle": &alert.title,
                        "activityText": &alert.message,
                        "facts": [
                            { "name": "Severity", "value": format!("{:?}", alert.severity) },
                            { "name": "Source", "value": &alert.source },
                        ]
                    }]
                });
                let client = reqwest::Client::new();
                if let Err(e) = client.post(webhook_url).json(&payload).send().await {
                    tracing::error!(
                        webhook_url = %webhook_url,
                        error = %e,
                        "Failed to send Teams alert"
                    );
                } else {
                    tracing::info!(webhook_url = %webhook_url, "Teams alert sent: {}", alert.title);
                }
            }
            NotificationChannel::Webhook { url, headers } => {
                let payload = serde_json::json!({
                    "id": &alert.id,
                    "title": &alert.title,
                    "message": &alert.message,
                    "severity": format!("{:?}", alert.severity),
                    "source": &alert.source,
                    "metrics": &alert.metrics,
                    "timestamp": alert.timestamp,
                });
                let client = reqwest::Client::new();
                let mut req = client.post(url).json(&payload);
                for (k, v) in headers {
                    req = req.header(k, v);
                }
                if let Err(e) = req.send().await {
                    tracing::error!(url = %url, error = %e, "Failed to send webhook alert");
                } else {
                    tracing::info!(url = %url, "Webhook alert sent: {}", alert.title);
                }
            }
            NotificationChannel::Log { level } => match level.as_str() {
                "error" => tracing::error!("ALERT: {} - {}", alert.title, alert.message),
                "warn" => tracing::warn!("ALERT: {} - {}", alert.title, alert.message),
                _ => tracing::info!("ALERT: {} - {}", alert.title, alert.message),
            },
        }

        Ok(())
    }

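    /// Drops cooldown entries older than `max_age_seconds` so the alert
    /// history map does not grow without bound.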
    pub fn cleanup_alert_history(&mut self, max_age_seconds: u64) {
        let current_time = crate::monitoring::current_timestamp();
        // saturating_sub guards against entries timestamped after
        // `current_time` (e.g. after a clock adjustment), which would
        // otherwise underflow in debug builds.
        self.recent_alerts
            .retain(|_, &mut timestamp| current_time.saturating_sub(timestamp) < max_age_seconds);
    }
}

impl Default for AlertConfig {
    fn default() -> Self {
        Self {
            enabled: true,
            thresholds: AlertThresholds::default(),
            channels: vec![NotificationChannel::Log {
                level: "warn".to_string(),
            }],
        }
    }
}