mockforge_performance/bottleneck.rs

//! Bottleneck Simulation
//!
//! Simulates various types of bottlenecks to observe system behavior under stress.

use serde::{Deserialize, Serialize};
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::RwLock;
use tokio::time::sleep;
use tracing::debug;

/// Bottleneck type
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum BottleneckType {
    /// CPU bottleneck (simulated with busy-wait)
    Cpu,
    /// Memory bottleneck (simulated with allocation)
    Memory,
    /// Network bottleneck (simulated with delay)
    Network,
    /// I/O bottleneck (simulated with delay)
    Io,
    /// Database bottleneck (simulated with delay)
    Database,
}

/// Bottleneck configuration
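///
/// A minimal builder sketch (illustrative only; the import path below is an
/// assumption about the crate layout, so the example is marked `ignore`):
///
/// ```rust,ignore
/// use mockforge_performance::bottleneck::{BottleneckConfig, BottleneckType};
///
/// // Throttle only endpoints containing "/api/users" with a moderate
/// // network bottleneck for 60 seconds.
/// let config = BottleneckConfig::new(BottleneckType::Network, 0.5)
///     .with_endpoint_pattern("/api/users".to_string())
///     .with_duration(60);
///
/// assert_eq!(config.severity, 0.5);
/// assert_eq!(config.duration_secs, Some(60));
/// ```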
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BottleneckConfig {
    /// Bottleneck type
    pub bottleneck_type: BottleneckType,
    /// Severity (0.0-1.0, where 1.0 is maximum bottleneck)
    pub severity: f64,
    /// Affected endpoint pattern (None = all endpoints)
    pub endpoint_pattern: Option<String>,
    /// Duration in seconds (None = indefinite)
    pub duration_secs: Option<u64>,
}

impl BottleneckConfig {
    /// Create a new bottleneck configuration
    pub fn new(bottleneck_type: BottleneckType, severity: f64) -> Self {
        Self {
            bottleneck_type,
            severity,
            endpoint_pattern: None,
            duration_secs: None,
        }
    }

    /// Set endpoint pattern
    pub fn with_endpoint_pattern(mut self, pattern: String) -> Self {
        self.endpoint_pattern = Some(pattern);
        self
    }

    /// Set duration
    pub fn with_duration(mut self, duration_secs: u64) -> Self {
        self.duration_secs = Some(duration_secs);
        self
    }
}

/// Bottleneck simulator
///
/// Simulates various types of bottlenecks.
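///
/// A minimal end-to-end sketch (illustrative only; the import path is an
/// assumption about the crate layout, so the example is marked `ignore`):
///
/// ```rust,ignore
/// use mockforge_performance::bottleneck::{
///     BottleneckConfig, BottleneckSimulator, BottleneckType,
/// };
///
/// # async fn demo() {
/// let simulator = BottleneckSimulator::new();
///
/// // Register a network bottleneck that only affects "/api/users" endpoints.
/// simulator
///     .add_bottleneck(
///         BottleneckConfig::new(BottleneckType::Network, 0.5)
///             .with_endpoint_pattern("/api/users".to_string()),
///     )
///     .await;
///
/// // For each request, apply_bottlenecks sleeps for the computed delay and returns it.
/// let delay_ms = simulator.apply_bottlenecks("/api/users/42").await;
/// assert_eq!(delay_ms, 250); // severity 0.5 * 500 ms for Network
/// # }
/// ```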
#[derive(Debug, Clone)]
pub struct BottleneckSimulator {
    /// Active bottlenecks
    bottlenecks: Arc<RwLock<Vec<BottleneckConfig>>>,
}

impl BottleneckSimulator {
    /// Create a new bottleneck simulator
    pub fn new() -> Self {
        Self {
            bottlenecks: Arc::new(RwLock::new(Vec::new())),
        }
    }

    /// Add a bottleneck
    pub async fn add_bottleneck(&self, config: BottleneckConfig) {
        let bottleneck_type = config.bottleneck_type;
        let mut bottlenecks = self.bottlenecks.write().await;
        bottlenecks.push(config);
        debug!("Bottleneck added: {:?}", bottleneck_type);
    }

    /// Remove all bottlenecks
    pub async fn clear_bottlenecks(&self) {
        let mut bottlenecks = self.bottlenecks.write().await;
        bottlenecks.clear();
        debug!("All bottlenecks cleared");
    }

    /// Get active bottlenecks
    pub async fn get_bottlenecks(&self) -> Vec<BottleneckConfig> {
        let bottlenecks = self.bottlenecks.read().await;
        bottlenecks.clone()
    }

    /// Apply bottlenecks for a request
    ///
    /// Returns the total delay in milliseconds.
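    ///
    /// Each matching bottleneck scales its effect with `severity` (0.0-1.0):
    /// - `Network`: adds `severity * 500` ms of delay
    /// - `Io`: adds `severity * 300` ms of delay
    /// - `Database`: adds `severity * 400` ms of delay
    /// - `Cpu`: busy-waits for `severity * 100` ms and adds no delay
    /// - `Memory`: allocates `severity * 100` MB (freed immediately) and adds no delay
    ///
    /// Delays from all matching bottlenecks are summed and slept once before returning.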
    pub async fn apply_bottlenecks(&self, endpoint: &str) -> u64 {
        let bottlenecks = self.bottlenecks.read().await;
        let mut total_delay_ms = 0u64;

        for bottleneck in bottlenecks.iter() {
            // Check if endpoint matches pattern
            if let Some(ref pattern) = bottleneck.endpoint_pattern {
                if !endpoint.contains(pattern) {
                    continue;
                }
            }

            // Calculate delay based on bottleneck type and severity
            let delay_ms = match bottleneck.bottleneck_type {
                BottleneckType::Cpu => {
                    // CPU bottleneck: busy-wait
                    let cpu_time_ms = (bottleneck.severity * 100.0) as u64;
                    self.simulate_cpu_bottleneck(cpu_time_ms).await;
                    0 // CPU bottleneck doesn't add delay, it uses CPU time
                }
                BottleneckType::Memory => {
                    // Memory bottleneck: allocation
                    let memory_mb = (bottleneck.severity * 100.0) as usize;
                    self.simulate_memory_bottleneck(memory_mb).await;
                    0 // Memory bottleneck doesn't add delay
                }
                BottleneckType::Network => {
                    // Network bottleneck: delay
                    (bottleneck.severity * 500.0) as u64
                }
                BottleneckType::Io => {
                    // I/O bottleneck: delay
                    (bottleneck.severity * 300.0) as u64
                }
                BottleneckType::Database => {
                    // Database bottleneck: delay
                    (bottleneck.severity * 400.0) as u64
                }
            };

            total_delay_ms += delay_ms;
        }

        if total_delay_ms > 0 {
            sleep(Duration::from_millis(total_delay_ms)).await;
        }

        total_delay_ms
    }

    /// Simulate CPU bottleneck (busy-wait)
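    ///
    /// Note: the spin loop does not yield, so it also occupies the current async
    /// worker thread for the whole duration rather than just this task.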
    async fn simulate_cpu_bottleneck(&self, duration_ms: u64) {
        let start = std::time::Instant::now();
        let duration = Duration::from_millis(duration_ms);

        // Busy-wait to simulate CPU load
        while start.elapsed() < duration {
            // Spin loop
            std::hint::spin_loop();
        }
    }

    /// Simulate memory bottleneck (allocation)
    async fn simulate_memory_bottleneck(&self, size_mb: usize) {
        // Allocate memory to simulate memory pressure
        let _memory: Vec<u8> = vec![0; size_mb * 1024 * 1024];
        // Memory is dropped when function returns
    }
}

impl Default for BottleneckSimulator {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_bottleneck_type_clone() {
        let bt = BottleneckType::Cpu;
        let cloned = bt.clone();
        assert_eq!(bt, cloned);
    }

    #[test]
    fn test_bottleneck_type_debug() {
        let bt = BottleneckType::Memory;
        let debug = format!("{:?}", bt);
        assert!(debug.contains("Memory"));
    }

    #[test]
    fn test_bottleneck_type_serialize() {
        let bt = BottleneckType::Network;
        let json = serde_json::to_string(&bt).unwrap();
        assert_eq!(json, "\"network\"");
    }

    #[test]
    fn test_bottleneck_type_deserialize() {
        let bt: BottleneckType = serde_json::from_str("\"database\"").unwrap();
        assert_eq!(bt, BottleneckType::Database);
    }

    #[test]
    fn test_bottleneck_type_serialize_all_variants() {
        assert_eq!(serde_json::to_string(&BottleneckType::Cpu).unwrap(), "\"cpu\"");
        assert_eq!(serde_json::to_string(&BottleneckType::Memory).unwrap(), "\"memory\"");
        assert_eq!(serde_json::to_string(&BottleneckType::Network).unwrap(), "\"network\"");
        assert_eq!(serde_json::to_string(&BottleneckType::Io).unwrap(), "\"io\"");
        assert_eq!(serde_json::to_string(&BottleneckType::Database).unwrap(), "\"database\"");
    }

    #[test]
    fn test_bottleneck_type_copy() {
        let bt = BottleneckType::Io;
        let copied: BottleneckType = bt;
        assert_eq!(bt, copied);
    }

    #[test]
    fn test_bottleneck_config_new() {
        let config = BottleneckConfig::new(BottleneckType::Cpu, 0.5);
        assert_eq!(config.bottleneck_type, BottleneckType::Cpu);
        assert_eq!(config.severity, 0.5);
        assert!(config.endpoint_pattern.is_none());
        assert!(config.duration_secs.is_none());
    }

    #[test]
    fn test_bottleneck_config_with_endpoint_pattern() {
        let config = BottleneckConfig::new(BottleneckType::Network, 0.8)
            .with_endpoint_pattern("/api/users".to_string());
        assert_eq!(config.endpoint_pattern, Some("/api/users".to_string()));
    }

    #[test]
    fn test_bottleneck_config_with_duration() {
        let config = BottleneckConfig::new(BottleneckType::Database, 0.3).with_duration(60);
        assert_eq!(config.duration_secs, Some(60));
    }

    #[test]
    fn test_bottleneck_config_builder_chain() {
        let config = BottleneckConfig::new(BottleneckType::Memory, 0.7)
            .with_endpoint_pattern("/api/orders".to_string())
            .with_duration(120);

        assert_eq!(config.bottleneck_type, BottleneckType::Memory);
        assert_eq!(config.severity, 0.7);
        assert_eq!(config.endpoint_pattern, Some("/api/orders".to_string()));
        assert_eq!(config.duration_secs, Some(120));
    }

    #[test]
    fn test_bottleneck_config_clone() {
        let config = BottleneckConfig::new(BottleneckType::Io, 0.4).with_duration(30);
        let cloned = config.clone();
        assert_eq!(config.bottleneck_type, cloned.bottleneck_type);
        assert_eq!(config.severity, cloned.severity);
    }

    #[test]
    fn test_bottleneck_config_debug() {
        let config = BottleneckConfig::new(BottleneckType::Cpu, 0.9);
        let debug = format!("{:?}", config);
        assert!(debug.contains("BottleneckConfig"));
        assert!(debug.contains("Cpu"));
    }

    #[test]
    fn test_bottleneck_config_serialize() {
        let config = BottleneckConfig::new(BottleneckType::Network, 0.5);
        let json = serde_json::to_string(&config).unwrap();
        assert!(json.contains("\"bottleneck_type\":\"network\""));
        assert!(json.contains("\"severity\":0.5"));
    }

    #[test]
    fn test_bottleneck_simulator_new() {
        let simulator = BottleneckSimulator::new();
        let debug = format!("{:?}", simulator);
        assert!(debug.contains("BottleneckSimulator"));
    }

    #[test]
    fn test_bottleneck_simulator_default() {
        let simulator = BottleneckSimulator::default();
        let debug = format!("{:?}", simulator);
        assert!(debug.contains("BottleneckSimulator"));
    }

    #[test]
    fn test_bottleneck_simulator_clone() {
        let simulator = BottleneckSimulator::new();
        let _cloned = simulator.clone();
    }

    #[tokio::test]
    async fn test_bottleneck_simulator() {
        let simulator = BottleneckSimulator::new();

        let config = BottleneckConfig::new(BottleneckType::Network, 0.5)
            .with_endpoint_pattern("/api/users".to_string());

        simulator.add_bottleneck(config).await;

        let bottlenecks = simulator.get_bottlenecks().await;
        assert_eq!(bottlenecks.len(), 1);
    }

    #[tokio::test]
    async fn test_bottleneck_simulator_clear() {
        let simulator = BottleneckSimulator::new();

        simulator.add_bottleneck(BottleneckConfig::new(BottleneckType::Cpu, 0.5)).await;
        simulator
            .add_bottleneck(BottleneckConfig::new(BottleneckType::Memory, 0.3))
            .await;

        let bottlenecks = simulator.get_bottlenecks().await;
        assert_eq!(bottlenecks.len(), 2);

        simulator.clear_bottlenecks().await;

        let bottlenecks = simulator.get_bottlenecks().await;
        assert!(bottlenecks.is_empty());
    }

    #[tokio::test]
    async fn test_bottleneck_simulator_multiple_bottlenecks() {
        let simulator = BottleneckSimulator::new();

        simulator
            .add_bottleneck(BottleneckConfig::new(BottleneckType::Network, 0.2))
            .await;
        simulator.add_bottleneck(BottleneckConfig::new(BottleneckType::Io, 0.3)).await;
        simulator
            .add_bottleneck(BottleneckConfig::new(BottleneckType::Database, 0.4))
            .await;

        let bottlenecks = simulator.get_bottlenecks().await;
        assert_eq!(bottlenecks.len(), 3);
    }

    #[tokio::test]
    async fn test_apply_bottlenecks() {
        let simulator = BottleneckSimulator::new();

        let config = BottleneckConfig::new(BottleneckType::Network, 0.1);
        simulator.add_bottleneck(config).await;

        let start = std::time::Instant::now();
        simulator.apply_bottlenecks("/api/test").await;
        let elapsed = start.elapsed();

        // Should have added some delay
        assert!(elapsed.as_millis() > 0);
    }

    #[tokio::test]
    async fn test_apply_bottlenecks_with_pattern_match() {
        let simulator = BottleneckSimulator::new();

        let config = BottleneckConfig::new(BottleneckType::Network, 0.1)
            .with_endpoint_pattern("/api/users".to_string());
        simulator.add_bottleneck(config).await;

        let start = std::time::Instant::now();
        let delay = simulator.apply_bottlenecks("/api/users/123").await;
        let elapsed = start.elapsed();

        // Should have applied delay because endpoint contains pattern
        assert!(elapsed.as_millis() > 0 || delay > 0);
    }

    #[tokio::test]
    async fn test_apply_bottlenecks_with_pattern_no_match() {
        let simulator = BottleneckSimulator::new();

        let config = BottleneckConfig::new(BottleneckType::Network, 0.5)
            .with_endpoint_pattern("/api/users".to_string());
        simulator.add_bottleneck(config).await;

        let start = std::time::Instant::now();
        let delay = simulator.apply_bottlenecks("/api/orders").await;
        let elapsed = start.elapsed();

        // Should not have applied delay because endpoint doesn't match
        assert_eq!(delay, 0);
        assert!(elapsed.as_millis() < 100);
    }

    #[tokio::test]
    async fn test_apply_bottlenecks_io() {
        let simulator = BottleneckSimulator::new();

        let config = BottleneckConfig::new(BottleneckType::Io, 0.1);
        simulator.add_bottleneck(config).await;

        let start = std::time::Instant::now();
        let delay = simulator.apply_bottlenecks("/api/test").await;
        let elapsed = start.elapsed();

        // I/O bottleneck adds delay
        assert!(delay > 0 || elapsed.as_millis() > 0);
    }

    #[tokio::test]
    async fn test_apply_bottlenecks_database() {
        let simulator = BottleneckSimulator::new();

        let config = BottleneckConfig::new(BottleneckType::Database, 0.1);
        simulator.add_bottleneck(config).await;

        let start = std::time::Instant::now();
        let delay = simulator.apply_bottlenecks("/api/test").await;
        let elapsed = start.elapsed();

        // Database bottleneck adds delay
        assert!(delay > 0 || elapsed.as_millis() > 0);
    }

    #[tokio::test]
    async fn test_apply_bottlenecks_cpu() {
        let simulator = BottleneckSimulator::new();

        // Very low severity to keep test fast
        let config = BottleneckConfig::new(BottleneckType::Cpu, 0.01);
        simulator.add_bottleneck(config).await;

        // CPU bottleneck returns 0 delay but uses CPU time
        let delay = simulator.apply_bottlenecks("/api/test").await;
        assert_eq!(delay, 0); // CPU doesn't add delay, it uses CPU time
    }

    #[tokio::test]
    async fn test_apply_bottlenecks_memory() {
        let simulator = BottleneckSimulator::new();

        // Very low severity to keep test fast
        let config = BottleneckConfig::new(BottleneckType::Memory, 0.01);
        simulator.add_bottleneck(config).await;

        // Memory bottleneck returns 0 delay
        let delay = simulator.apply_bottlenecks("/api/test").await;
        assert_eq!(delay, 0);
    }

    #[tokio::test]
    async fn test_apply_bottlenecks_no_bottlenecks() {
        let simulator = BottleneckSimulator::new();

        let start = std::time::Instant::now();
        let delay = simulator.apply_bottlenecks("/api/test").await;
        let elapsed = start.elapsed();

        // No bottlenecks, should be fast
        assert_eq!(delay, 0);
        assert!(elapsed.as_millis() < 10);
    }
}