// simular/domains/monte_carlo.rs
1//! Monte Carlo simulation engine.
2//!
3//! Implements Monte Carlo methods with variance reduction:
4//! - Antithetic variates
5//! - Control variates
6//! - Importance sampling
7//! - Stratified sampling
8//!
9//! # Convergence
10//!
11//! By the Central Limit Theorem, Monte Carlo estimators converge at O(n^{-1/2})
12//! regardless of dimension, making them ideal for high-dimensional problems.
13
14use crate::engine::rng::SimRng;
15use serde::{Deserialize, Serialize};
16
/// Result of a Monte Carlo simulation.
///
/// Bundles the point estimate with its sampling uncertainty so callers can
/// judge convergence without re-deriving statistics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MonteCarloResult {
    /// Point estimate.
    pub estimate: f64,
    /// Standard error of the estimate.
    pub std_error: f64,
    /// Number of samples used to form the estimate.
    pub samples: usize,
    /// 95% confidence interval (estimate ± 1.96 * `std_error`), assuming
    /// approximate normality of the estimator (CLT).
    pub confidence_interval: (f64, f64),
    /// Variance reduction factor relative to standard Monte Carlo, when the
    /// technique used reports one (otherwise `None`).
    pub variance_reduction_factor: Option<f64>,
}
31
32impl MonteCarloResult {
33    /// Create a new Monte Carlo result.
34    #[must_use]
35    pub fn new(estimate: f64, std_error: f64, samples: usize) -> Self {
36        let ci_half = 1.96 * std_error;
37        Self {
38            estimate,
39            std_error,
40            samples,
41            confidence_interval: (estimate - ci_half, estimate + ci_half),
42            variance_reduction_factor: None,
43        }
44    }
45
46    /// Set the variance reduction factor.
47    #[must_use]
48    pub fn with_variance_reduction(mut self, factor: f64) -> Self {
49        self.variance_reduction_factor = Some(factor);
50        self
51    }
52
53    /// Check if value is within confidence interval.
54    #[must_use]
55    pub fn contains(&self, value: f64) -> bool {
56        value >= self.confidence_interval.0 && value <= self.confidence_interval.1
57    }
58
59    /// Get relative error.
60    #[must_use]
61    pub fn relative_error(&self) -> f64 {
62        if self.estimate.abs() < f64::EPSILON {
63            self.std_error
64        } else {
65            self.std_error / self.estimate.abs()
66        }
67    }
68}
69
/// Variance reduction technique.
///
/// Selects how the engine draws samples and weights them. Function pointers
/// (rather than closures) keep the enum `Clone` + `Debug` without boxing.
#[derive(Debug, Clone, Default)]
pub enum VarianceReduction {
    /// No variance reduction (plain Monte Carlo).
    #[default]
    None,
    /// Antithetic variates: use (U, 1-U) pairs.
    Antithetic,
    /// Control variate with known expectation.
    ControlVariate {
        /// Control function C(u), evaluated at the same uniforms as f.
        control_fn: fn(f64) -> f64,
        /// Known expectation E[C] of the control.
        expectation: f64,
    },
    /// Importance sampling with proposal distribution.
    ///
    /// Standard importance sampling: `E_p[f] = E_q[f * p/q]`
    ImportanceSampling {
        /// Sample from proposal distribution q(x).
        sample_fn: fn(&mut SimRng) -> f64,
        /// Likelihood ratio p(x)/q(x) where p is target, q is proposal.
        likelihood_ratio: fn(f64) -> f64,
    },
    /// Self-normalizing importance sampling.
    ///
    /// More robust when the normalizing constant is unknown.
    /// Uses: `E_p[f] ≈ Σ(w_i * f(x_i)) / Σ(w_i)`
    SelfNormalizingIS {
        /// Sample from proposal distribution q(x).
        sample_fn: fn(&mut SimRng) -> f64,
        /// Unnormalized weight w(x) ∝ p(x)/q(x).
        weight_fn: fn(f64) -> f64,
    },
    /// Stratified sampling over equal-width strata of [0,1].
    Stratified {
        /// Number of strata.
        num_strata: usize,
    },
}
110
/// Monte Carlo engine for stochastic simulation.
///
/// Pairs a fixed sample budget with a variance reduction strategy chosen
/// once at construction.
#[derive(Debug)]
pub struct MonteCarloEngine {
    /// Total number of samples to draw per run.
    n_samples: usize,
    /// Variance reduction technique applied by `run`.
    variance_reduction: VarianceReduction,
}
119
120impl MonteCarloEngine {
121    /// Create a new Monte Carlo engine.
122    #[must_use]
123    pub const fn new(n_samples: usize, variance_reduction: VarianceReduction) -> Self {
124        Self {
125            n_samples,
126            variance_reduction,
127        }
128    }
129
130    /// Create engine with default settings.
131    #[must_use]
132    pub const fn with_samples(n_samples: usize) -> Self {
133        Self::new(n_samples, VarianceReduction::None)
134    }
135
136    /// Run Monte Carlo simulation with a function f: [0,1] -> R.
137    ///
138    /// # Example
139    ///
140    /// ```rust
141    /// use simular::domains::monte_carlo::{MonteCarloEngine, VarianceReduction};
142    /// use simular::engine::rng::SimRng;
143    ///
144    /// let engine = MonteCarloEngine::with_samples(10000);
145    /// let mut rng = SimRng::new(42);
146    ///
147    /// // Estimate integral of x^2 from 0 to 1 (true value = 1/3)
148    /// let result = engine.run(|x| x * x, &mut rng);
149    /// assert!((result.estimate - 1.0/3.0).abs() < 0.01);
150    /// ```
151    #[must_use]
152    pub fn run<F>(&self, f: F, rng: &mut SimRng) -> MonteCarloResult
153    where
154        F: Fn(f64) -> f64,
155    {
156        match &self.variance_reduction {
157            VarianceReduction::None => self.run_standard(&f, rng),
158            VarianceReduction::Antithetic => self.run_antithetic(&f, rng),
159            VarianceReduction::ControlVariate {
160                control_fn,
161                expectation,
162            } => self.run_control_variate(&f, *control_fn, *expectation, rng),
163            VarianceReduction::ImportanceSampling {
164                sample_fn,
165                likelihood_ratio,
166            } => self.run_importance(&f, *sample_fn, *likelihood_ratio, rng),
167            VarianceReduction::SelfNormalizingIS {
168                sample_fn,
169                weight_fn,
170            } => self.run_self_normalizing_is(&f, *sample_fn, *weight_fn, rng),
171            VarianceReduction::Stratified { num_strata } => {
172                self.run_stratified(&f, *num_strata, rng)
173            }
174        }
175    }
176
177    /// Run multi-dimensional Monte Carlo.
178    ///
179    /// # Example
180    ///
181    /// ```rust
182    /// use simular::domains::monte_carlo::MonteCarloEngine;
183    /// use simular::engine::rng::SimRng;
184    ///
185    /// let engine = MonteCarloEngine::with_samples(10000);
186    /// let mut rng = SimRng::new(42);
187    ///
188    /// // Estimate volume of unit sphere in 3D (true value ≈ 4.189)
189    /// let result = engine.run_nd(3, |x| {
190    ///     let r2 = x.iter().map(|&xi| xi * xi).sum::<f64>();
191    ///     if r2 <= 1.0 { 8.0 } else { 0.0 } // 8 = 2^3 for [-1,1]^3 domain
192    /// }, &mut rng);
193    /// ```
194    #[must_use]
195    pub fn run_nd<F>(&self, dim: usize, f: F, rng: &mut SimRng) -> MonteCarloResult
196    where
197        F: Fn(&[f64]) -> f64,
198    {
199        let mut sum = 0.0;
200        let mut sum_sq = 0.0;
201        let mut samples = Vec::with_capacity(dim);
202
203        for _ in 0..self.n_samples {
204            samples.clear();
205            for _ in 0..dim {
206                samples.push(rng.gen_f64());
207            }
208
209            let value = f(&samples);
210            sum += value;
211            sum_sq += value * value;
212        }
213
214        let n = self.n_samples as f64;
215        let mean = sum / n;
216        let variance = (sum_sq / n) - (mean * mean);
217        let std_error = (variance / n).sqrt();
218
219        MonteCarloResult::new(mean, std_error, self.n_samples)
220    }
221
222    /// Standard Monte Carlo (no variance reduction).
223    fn run_standard<F>(&self, f: &F, rng: &mut SimRng) -> MonteCarloResult
224    where
225        F: Fn(f64) -> f64,
226    {
227        let mut sum = 0.0;
228        let mut sum_sq = 0.0;
229
230        for _ in 0..self.n_samples {
231            let u = rng.gen_f64();
232            let value = f(u);
233            sum += value;
234            sum_sq += value * value;
235        }
236
237        let n = self.n_samples as f64;
238        let mean = sum / n;
239        let variance = (sum_sq / n) - (mean * mean);
240        let std_error = (variance / n).sqrt();
241
242        MonteCarloResult::new(mean, std_error, self.n_samples)
243    }
244
245    /// Antithetic variates: use (U, 1-U) pairs to induce negative correlation.
246    fn run_antithetic<F>(&self, f: &F, rng: &mut SimRng) -> MonteCarloResult
247    where
248        F: Fn(f64) -> f64,
249    {
250        let n_pairs = self.n_samples / 2;
251        let mut sum = 0.0;
252        let mut sum_sq = 0.0;
253
254        for _ in 0..n_pairs {
255            let u = rng.gen_f64();
256
257            // Antithetic pair
258            let y1 = f(u);
259            let y2 = f(1.0 - u);
260
261            // Use average of pair
262            let avg = (y1 + y2) / 2.0;
263            sum += avg;
264            sum_sq += avg * avg;
265        }
266
267        let n = n_pairs as f64;
268        let mean = sum / n;
269        let variance = (sum_sq / n) - (mean * mean);
270        let std_error = (variance / n).sqrt();
271
272        // Effective samples is n_pairs * 2
273        let mut result = MonteCarloResult::new(mean, std_error, n_pairs * 2);
274
275        // Estimate variance reduction factor
276        let standard_result = self.run_standard(f, &mut rng.clone());
277        if standard_result.std_error > f64::EPSILON {
278            result = result
279                .with_variance_reduction(standard_result.std_error / std_error.max(f64::EPSILON));
280        }
281
282        result
283    }
284
285    /// Control variate method.
286    fn run_control_variate<F>(
287        &self,
288        f: &F,
289        control_fn: fn(f64) -> f64,
290        control_expectation: f64,
291        rng: &mut SimRng,
292    ) -> MonteCarloResult
293    where
294        F: Fn(f64) -> f64,
295    {
296        // First pass: estimate correlation
297        let mut sum_f = 0.0;
298        let mut sum_c = 0.0;
299        let mut sum_fc = 0.0;
300        let mut sum_c2 = 0.0;
301        let samples: Vec<f64> = rng.sample_n(self.n_samples);
302
303        for &u in &samples {
304            let fv = f(u);
305            let cv = control_fn(u);
306            sum_f += fv;
307            sum_c += cv;
308            sum_fc += fv * cv;
309            sum_c2 += cv * cv;
310        }
311
312        let n = self.n_samples as f64;
313        let mean_f = sum_f / n;
314        let mean_c = sum_c / n;
315
316        // Estimate optimal coefficient
317        let cov_fc = (sum_fc / n) - mean_f * mean_c;
318        let var_c = (sum_c2 / n) - mean_c * mean_c;
319        let c_star = if var_c > f64::EPSILON {
320            -cov_fc / var_c
321        } else {
322            0.0
323        };
324
325        // Second pass: compute adjusted estimate
326        let mut sum_adj = 0.0;
327        let mut sum_adj_sq = 0.0;
328
329        for &u in &samples {
330            let fv = f(u);
331            let cv = control_fn(u);
332            let adjusted = fv + c_star * (cv - control_expectation);
333            sum_adj += adjusted;
334            sum_adj_sq += adjusted * adjusted;
335        }
336
337        let mean_adj = sum_adj / n;
338        let variance_adj = (sum_adj_sq / n) - mean_adj * mean_adj;
339        let std_error = (variance_adj / n).sqrt();
340
341        MonteCarloResult::new(mean_adj, std_error, self.n_samples)
342    }
343
344    /// Importance sampling.
345    fn run_importance<F>(
346        &self,
347        f: &F,
348        sample_fn: fn(&mut SimRng) -> f64,
349        likelihood_ratio: fn(f64) -> f64,
350        rng: &mut SimRng,
351    ) -> MonteCarloResult
352    where
353        F: Fn(f64) -> f64,
354    {
355        let mut sum = 0.0;
356        let mut sum_sq = 0.0;
357
358        for _ in 0..self.n_samples {
359            let x = sample_fn(rng);
360            let weight = likelihood_ratio(x);
361            let value = f(x) * weight;
362            sum += value;
363            sum_sq += value * value;
364        }
365
366        let n = self.n_samples as f64;
367        let mean = sum / n;
368        let variance = (sum_sq / n) - mean * mean;
369        let std_error = (variance / n).sqrt();
370
371        MonteCarloResult::new(mean, std_error, self.n_samples)
372    }
373
374    /// Self-normalizing importance sampling.
375    ///
376    /// More robust when the normalizing constant is unknown.
377    /// Uses ratio estimator: `Σ(w_i * f(x_i)) / Σ(w_i)`
378    fn run_self_normalizing_is<F>(
379        &self,
380        f: &F,
381        sample_fn: fn(&mut SimRng) -> f64,
382        weight_fn: fn(f64) -> f64,
383        rng: &mut SimRng,
384    ) -> MonteCarloResult
385    where
386        F: Fn(f64) -> f64,
387    {
388        // Collect all weights and values
389        let mut weights = Vec::with_capacity(self.n_samples);
390        let mut values = Vec::with_capacity(self.n_samples);
391
392        for _ in 0..self.n_samples {
393            let x = sample_fn(rng);
394            let w = weight_fn(x);
395            let fv = f(x);
396
397            weights.push(w);
398            values.push(fv);
399        }
400
401        // Normalize weights
402        let weight_sum: f64 = weights.iter().sum();
403        if weight_sum.abs() < f64::EPSILON {
404            return MonteCarloResult::new(0.0, f64::INFINITY, self.n_samples);
405        }
406
407        // Compute weighted mean: Σ(w_i * f_i) / Σ(w_i)
408        let weighted_sum: f64 = weights.iter().zip(values.iter()).map(|(w, v)| w * v).sum();
409        let mean = weighted_sum / weight_sum;
410
411        // Compute effective sample size: ESS = (Σw)² / Σ(w²)
412        let weight_sq_sum: f64 = weights.iter().map(|w| w * w).sum();
413        let ess = (weight_sum * weight_sum) / weight_sq_sum;
414
415        // Standard error estimation using linearization (delta method)
416        // For ratio estimator, var(μ̂) ≈ 1/n * Σ w_i² (f_i - μ̂)² / (mean(w))²
417        let mean_weight = weight_sum / self.n_samples as f64;
418        let variance: f64 = weights
419            .iter()
420            .zip(values.iter())
421            .map(|(w, v)| {
422                let normalized_w = w / mean_weight;
423                normalized_w * normalized_w * (v - mean) * (v - mean)
424            })
425            .sum::<f64>()
426            / (self.n_samples as f64 * self.n_samples as f64);
427
428        let std_error = variance.sqrt();
429
430        let mut result = MonteCarloResult::new(mean, std_error, self.n_samples);
431        // Report effective sample size as variance reduction indicator
432        result = result.with_variance_reduction(ess / self.n_samples as f64);
433        result
434    }
435
436    /// Stratified sampling.
437    fn run_stratified<F>(&self, f: &F, num_strata: usize, rng: &mut SimRng) -> MonteCarloResult
438    where
439        F: Fn(f64) -> f64,
440    {
441        let samples_per_stratum = self.n_samples / num_strata;
442        let stratum_width = 1.0 / num_strata as f64;
443
444        let mut sum = 0.0;
445        let mut sum_sq = 0.0;
446        let mut total_samples = 0;
447
448        for i in 0..num_strata {
449            let stratum_start = i as f64 * stratum_width;
450            let mut stratum_sum = 0.0;
451            let mut stratum_sum_sq = 0.0;
452
453            for _ in 0..samples_per_stratum {
454                let u = stratum_start + rng.gen_f64() * stratum_width;
455                let value = f(u);
456                stratum_sum += value;
457                stratum_sum_sq += value * value;
458                total_samples += 1;
459            }
460
461            let stratum_mean = stratum_sum / samples_per_stratum as f64;
462            sum += stratum_mean;
463            sum_sq += stratum_sum_sq / samples_per_stratum as f64;
464        }
465
466        let mean = sum / num_strata as f64;
467        let variance = (sum_sq / num_strata as f64) - mean * mean;
468        let std_error = (variance / self.n_samples as f64).sqrt();
469
470        MonteCarloResult::new(mean, std_error, total_samples)
471    }
472
    /// Get configured sample count.
    ///
    /// This is the total budget passed at construction; some techniques use
    /// slightly fewer samples (rounding down to whole pairs or strata).
    #[must_use]
    pub const fn n_samples(&self) -> usize {
        self.n_samples
    }
478}
479
480// =============================================================================
481// Work-Stealing Monte Carlo (Section 4.3.5)
482// =============================================================================
483
/// Individual simulation task for the work-stealing scheduler.
///
/// Each task carries its own seed so a trajectory is reproducible
/// regardless of which worker thread happens to execute it.
#[derive(Debug, Clone)]
pub struct SimulationTask {
    /// Random seed for this trajectory (set to the task index by `execute`).
    pub seed: u64,
    /// Task index; results are returned sorted by this value.
    pub index: usize,
}
492
/// Work-stealing Monte Carlo scheduler [55].
///
/// Implements Heijunka (load leveling) for variable-duration simulations.
/// Uses crossbeam-deque for lock-free work stealing to handle the "straggler problem"
/// where threads would otherwise wait for the slowest simulation.
#[derive(Debug)]
pub struct WorkStealingMonteCarlo {
    /// Number of worker threads spawned by `execute`.
    num_workers: usize,
}
503
impl Default for WorkStealingMonteCarlo {
    /// Equivalent to [`WorkStealingMonteCarlo::new`]: one worker per
    /// available CPU, falling back to 4 if that cannot be determined.
    fn default() -> Self {
        Self::new()
    }
}
509
510impl WorkStealingMonteCarlo {
511    /// Create with default number of workers (number of CPUs).
512    #[must_use]
513    pub fn new() -> Self {
514        Self {
515            num_workers: std::thread::available_parallelism()
516                .map(std::num::NonZero::get)
517                .unwrap_or(4),
518        }
519    }
520
    /// Create with specified number of workers.
    ///
    /// NOTE(review): callers should pass at least 1 — with 0 workers there
    /// are no threads to drain the task queue.
    #[must_use]
    pub const fn with_workers(num_workers: usize) -> Self {
        Self { num_workers }
    }
526
527    /// Execute Monte Carlo simulation with work stealing [55].
528    ///
529    /// Tasks are distributed across workers, and idle workers steal tasks
530    /// from busy workers to maintain load balance (Heijunka).
531    pub fn execute<F, R>(&self, n_samples: usize, simulate: F) -> Vec<R>
532    where
533        F: Fn(SimulationTask) -> R + Sync,
534        R: Send,
535    {
536        use crossbeam_deque::{Injector, Stealer, Worker};
537
538        // Global work queue
539        let injector: Injector<SimulationTask> = Injector::new();
540
541        // Per-worker local queues
542        let workers: Vec<Worker<SimulationTask>> =
543            (0..self.num_workers).map(|_| Worker::new_fifo()).collect();
544
545        // Stealers for cross-worker theft
546        let stealers: Vec<Stealer<SimulationTask>> = workers.iter().map(Worker::stealer).collect();
547
548        // Populate global queue
549        for index in 0..n_samples {
550            injector.push(SimulationTask {
551                seed: index as u64,
552                index,
553            });
554        }
555
556        // Results storage - use Vec with push instead of pre-allocation
557        let results: std::sync::Mutex<Vec<(usize, R)>> =
558            std::sync::Mutex::new(Vec::with_capacity(n_samples));
559
560        std::thread::scope(|s| {
561            for (worker_id, worker) in workers.into_iter().enumerate() {
562                let injector = &injector;
563                let stealers = &stealers;
564                let results = &results;
565                let simulate = &simulate;
566
567                s.spawn(move || {
568                    loop {
569                        // Try local queue first
570                        let task = worker
571                            .pop()
572                            .or_else(|| {
573                                // Try global queue
574                                loop {
575                                    match injector.steal() {
576                                        crossbeam_deque::Steal::Success(task) => return Some(task),
577                                        crossbeam_deque::Steal::Empty => break,
578                                        crossbeam_deque::Steal::Retry => {}
579                                    }
580                                }
581                                None
582                            })
583                            .or_else(|| {
584                                // Steal from other workers (round-robin)
585                                for i in 0..stealers.len() {
586                                    let stealer_idx = (worker_id + i + 1) % stealers.len();
587                                    loop {
588                                        match stealers[stealer_idx].steal() {
589                                            crossbeam_deque::Steal::Success(task) => {
590                                                return Some(task)
591                                            }
592                                            crossbeam_deque::Steal::Empty => break,
593                                            crossbeam_deque::Steal::Retry => {}
594                                        }
595                                    }
596                                }
597                                None
598                            });
599
600                        match task {
601                            Some(task) => {
602                                let index = task.index;
603                                let result = simulate(task);
604                                if let Ok(mut guard) = results.lock() {
605                                    guard.push((index, result));
606                                }
607                            }
608                            None => break, // No more work
609                        }
610                    }
611                });
612            }
613        });
614
615        // Sort by index and extract results
616        let mut indexed_results = results.into_inner().unwrap_or_default();
617        indexed_results.sort_by_key(|(idx, _)| *idx);
618        indexed_results.into_iter().map(|(_, r)| r).collect()
619    }
620
621    /// Execute with statistics collection.
622    ///
623    /// Returns results and basic statistics about the simulation.
624    pub fn execute_with_stats<F>(
625        &self,
626        n_samples: usize,
627        simulate: F,
628    ) -> (Vec<f64>, MonteCarloResult)
629    where
630        F: Fn(SimulationTask) -> f64 + Sync,
631    {
632        let results = self.execute(n_samples, simulate);
633
634        let n = results.len() as f64;
635        let sum: f64 = results.iter().sum();
636        let mean = sum / n;
637
638        let variance: f64 = results.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / (n - 1.0);
639        let std_error = (variance / n).sqrt();
640
641        let mc_result = MonteCarloResult::new(mean, std_error, results.len());
642
643        (results, mc_result)
644    }
645
    /// Get number of workers.
    ///
    /// Returns the configured worker-thread count (the value stored at
    /// construction).
    #[must_use]
    pub const fn num_workers(&self) -> usize {
        self.num_workers
    }
651}
652
653#[cfg(test)]
654mod tests {
655    use super::*;
656
657    #[test]
658    fn test_mc_result_creation() {
659        let result = MonteCarloResult::new(0.5, 0.01, 10000);
660
661        assert!((result.estimate - 0.5).abs() < f64::EPSILON);
662        assert!((result.std_error - 0.01).abs() < f64::EPSILON);
663        assert_eq!(result.samples, 10000);
664
665        // 95% CI = estimate ± 1.96 * std_error
666        assert!((result.confidence_interval.0 - 0.4804).abs() < 0.001);
667        assert!((result.confidence_interval.1 - 0.5196).abs() < 0.001);
668    }
669
670    #[test]
671    fn test_mc_contains() {
672        let result = MonteCarloResult::new(0.5, 0.01, 10000);
673
674        assert!(result.contains(0.5));
675        assert!(result.contains(0.49));
676        assert!(!result.contains(0.4));
677    }
678
679    #[test]
680    fn test_standard_mc_uniform() {
681        let engine = MonteCarloEngine::with_samples(100_000);
682        let mut rng = SimRng::new(42);
683
684        // E[U] = 0.5 for U ~ Uniform(0,1)
685        let result = engine.run(|x| x, &mut rng);
686
687        assert!((result.estimate - 0.5).abs() < 0.01);
688        assert!(result.std_error < 0.01);
689    }
690
691    #[test]
692    fn test_standard_mc_square() {
693        let engine = MonteCarloEngine::with_samples(100_000);
694        let mut rng = SimRng::new(42);
695
696        // E[U^2] = 1/3 for U ~ Uniform(0,1)
697        let result = engine.run(|x| x * x, &mut rng);
698
699        assert!((result.estimate - 1.0 / 3.0).abs() < 0.01);
700    }
701
702    #[test]
703    fn test_antithetic_variates() {
704        let engine = MonteCarloEngine::new(100_000, VarianceReduction::Antithetic);
705        let mut rng = SimRng::new(42);
706
707        // Antithetic should reduce variance for monotonic functions
708        let result = engine.run(|x| x, &mut rng);
709
710        // Should still get correct estimate
711        assert!((result.estimate - 0.5).abs() < 0.01);
712
713        // Variance reduction factor should be > 1
714        if let Some(vrf) = result.variance_reduction_factor {
715            assert!(vrf > 1.0, "Expected variance reduction, got factor {}", vrf);
716        }
717    }
718
719    #[test]
720    fn test_stratified_sampling() {
721        let engine =
722            MonteCarloEngine::new(100_000, VarianceReduction::Stratified { num_strata: 10 });
723        let mut rng = SimRng::new(42);
724
725        let result = engine.run(|x| x * x, &mut rng);
726
727        assert!((result.estimate - 1.0 / 3.0).abs() < 0.01);
728    }
729
730    #[test]
731    fn test_importance_sampling() {
732        // Example: estimate E[x^4] under Uniform(0,1) using Beta(2,1) proposal
733        // Target: p(x) = 1 (uniform)
734        // Proposal: q(x) = 2x (Beta(2,1) PDF)
735        // Likelihood ratio: p(x)/q(x) = 1/(2x)
736        // True value: E[x^4] = 1/5 = 0.2
737
738        fn sample_beta21(rng: &mut SimRng) -> f64 {
739            // Sample from Beta(2,1): use inverse CDF method
740            // CDF(x) = x^2, so inverse is sqrt(U)
741            rng.gen_f64().sqrt()
742        }
743
744        fn likelihood_ratio(x: f64) -> f64 {
745            if x < f64::EPSILON {
746                1.0
747            } else {
748                1.0 / (2.0 * x)
749            }
750        }
751
752        let engine = MonteCarloEngine::new(
753            100_000,
754            VarianceReduction::ImportanceSampling {
755                sample_fn: sample_beta21,
756                likelihood_ratio,
757            },
758        );
759        let mut rng = SimRng::new(42);
760
761        let result = engine.run(|x| x.powi(4), &mut rng);
762
763        // E[x^4] = 1/5 = 0.2
764        assert!(
765            (result.estimate - 0.2).abs() < 0.01,
766            "Expected ~0.2, got {}",
767            result.estimate
768        );
769    }
770
771    #[test]
772    fn test_importance_sampling_reduces_variance() {
773        // For functions peaked near x=1, sampling from Beta(2,1) should help
774        // because it samples more from higher x values
775
776        fn sample_beta21(rng: &mut SimRng) -> f64 {
777            rng.gen_f64().sqrt()
778        }
779
780        fn likelihood_ratio(x: f64) -> f64 {
781            if x < f64::EPSILON {
782                1.0
783            } else {
784                1.0 / (2.0 * x)
785            }
786        }
787
788        // Function peaked near x=1
789        let f = |x: f64| x.powi(10);
790        // True value: 1/11
791
792        let standard_engine = MonteCarloEngine::with_samples(10_000);
793        let is_engine = MonteCarloEngine::new(
794            10_000,
795            VarianceReduction::ImportanceSampling {
796                sample_fn: sample_beta21,
797                likelihood_ratio,
798            },
799        );
800
801        let mut rng1 = SimRng::new(42);
802        let mut rng2 = SimRng::new(42);
803
804        let standard_result = standard_engine.run(f, &mut rng1);
805        let is_result = is_engine.run(f, &mut rng2);
806
807        // Both should estimate approximately 1/11 ≈ 0.0909
808        let true_value = 1.0 / 11.0;
809        assert!((standard_result.estimate - true_value).abs() < 0.02);
810        assert!((is_result.estimate - true_value).abs() < 0.02);
811
812        // IS should have lower variance for this function
813        // (not always guaranteed, but likely for peaked functions)
814    }
815
816    #[test]
817    fn test_self_normalizing_importance_sampling() {
818        // Self-normalizing IS is useful when we don't know the normalizing constant
819        // Test: E[x] under Uniform(0,1) using unnormalized weights
820
821        fn sample_uniform(rng: &mut SimRng) -> f64 {
822            rng.gen_f64()
823        }
824
825        fn weight_fn(x: f64) -> f64 {
826            // Uniform weights = 1.0 everywhere
827            // This should give same result as standard MC
828            1.0 + 0.0 * x // Use x to prevent unused warning
829        }
830
831        let engine = MonteCarloEngine::new(
832            100_000,
833            VarianceReduction::SelfNormalizingIS {
834                sample_fn: sample_uniform,
835                weight_fn,
836            },
837        );
838        let mut rng = SimRng::new(42);
839
840        let result = engine.run(|x| x, &mut rng);
841
842        // E[x] = 0.5
843        assert!(
844            (result.estimate - 0.5).abs() < 0.01,
845            "Expected ~0.5, got {}",
846            result.estimate
847        );
848
849        // With uniform weights, ESS should be close to n
850        if let Some(ess_ratio) = result.variance_reduction_factor {
851            assert!(
852                ess_ratio > 0.9,
853                "ESS ratio should be near 1 for uniform weights, got {}",
854                ess_ratio
855            );
856        }
857    }
858
859    #[test]
860    fn test_self_normalizing_is_weighted() {
861        // Test with non-uniform weights
862        // Use linear weight w(x) = x to emphasize larger values
863
864        fn sample_uniform(rng: &mut SimRng) -> f64 {
865            rng.gen_f64()
866        }
867
868        fn weight_fn(x: f64) -> f64 {
869            // Weight proportional to x
870            // This reweights to emphasize larger x values
871            x.max(0.001) // Avoid zero weights
872        }
873
874        let engine = MonteCarloEngine::new(
875            100_000,
876            VarianceReduction::SelfNormalizingIS {
877                sample_fn: sample_uniform,
878                weight_fn,
879            },
880        );
881        let mut rng = SimRng::new(42);
882
883        // E_weighted[f(x)] where weight ∝ x
884        // With w(x) = x and f(x) = 1: E = ∫x dx / ∫x dx = 1 (trivial)
885        // With w(x) = x and f(x) = x: E = ∫x² dx / ∫x dx = (1/3)/(1/2) = 2/3
886        let result = engine.run(|x| x, &mut rng);
887
888        // Should be approximately 2/3
889        assert!(
890            (result.estimate - 2.0 / 3.0).abs() < 0.02,
891            "Expected ~0.667, got {}",
892            result.estimate
893        );
894
895        // ESS should be less than n due to weight variation
896        if let Some(ess_ratio) = result.variance_reduction_factor {
897            assert!(
898                ess_ratio < 1.0,
899                "ESS ratio should be < 1 for varied weights"
900            );
901        }
902    }
903
904    #[test]
905    fn test_multidimensional_mc() {
906        let engine = MonteCarloEngine::with_samples(100_000);
907        let mut rng = SimRng::new(42);
908
909        // Estimate volume of unit hypercube in 3D (should be 1.0)
910        let result = engine.run_nd(3, |_x| 1.0, &mut rng);
911
912        assert!((result.estimate - 1.0).abs() < 0.01);
913    }
914
915    #[test]
916    fn test_mc_pi_estimation() {
917        let engine = MonteCarloEngine::with_samples(100_000);
918        let mut rng = SimRng::new(42);
919
920        // Estimate pi using quarter circle
921        // Area of quarter unit circle = pi/4
922        let result = engine.run_nd(
923            2,
924            |x| {
925                if x[0] * x[0] + x[1] * x[1] <= 1.0 {
926                    4.0
927                } else {
928                    0.0
929                }
930            },
931            &mut rng,
932        );
933
934        assert!((result.estimate - std::f64::consts::PI).abs() < 0.05);
935    }
936
937    #[test]
938    fn test_convergence_rate() {
939        // Monte Carlo should converge at O(n^{-1/2})
940        let mut rng = SimRng::new(42);
941
942        let engine_small = MonteCarloEngine::with_samples(1_000);
943        let engine_large = MonteCarloEngine::with_samples(100_000);
944
945        let result_small = engine_small.run(|x| x * x, &mut rng);
946        let result_large = engine_large.run(|x| x * x, &mut rng);
947
948        // Error should decrease by ~sqrt(100) = 10
949        let ratio = result_small.std_error / result_large.std_error;
950        assert!(
951            ratio > 5.0 && ratio < 20.0,
952            "Expected error ratio ~10, got {}",
953            ratio
954        );
955    }
956
957    // === Work-Stealing Monte Carlo Tests (Section 4.3.5) ===
958
959    #[test]
960    fn test_work_stealing_basic() {
961        let ws = WorkStealingMonteCarlo::with_workers(4);
962
963        let results = ws.execute(100, |task| task.index * 2);
964
965        assert_eq!(results.len(), 100);
966        for (i, &r) in results.iter().enumerate() {
967            assert_eq!(r, i * 2);
968        }
969    }
970
971    #[test]
972    fn test_work_stealing_pi_estimation() {
973        let ws = WorkStealingMonteCarlo::with_workers(4);
974
975        // Estimate pi using quarter circle
976        let (results, stats) = ws.execute_with_stats(100_000, |task| {
977            let mut rng = SimRng::new(task.seed);
978            let x = rng.gen_f64();
979            let y = rng.gen_f64();
980            if x * x + y * y <= 1.0 {
981                4.0
982            } else {
983                0.0
984            }
985        });
986
987        assert_eq!(results.len(), 100_000);
988        assert!(
989            (stats.estimate - std::f64::consts::PI).abs() < 0.1,
990            "Pi estimate {} too far from actual",
991            stats.estimate
992        );
993    }
994
995    #[test]
996    fn test_work_stealing_variable_duration() {
997        let ws = WorkStealingMonteCarlo::with_workers(4);
998
999        // Simulate variable-duration tasks (some take longer)
1000        let results: Vec<u64> = ws.execute(50, |task| {
1001            // Simulate some "work" - longer tasks should be stolen
1002            let mut sum = 0u64;
1003            let iterations = if task.index % 10 == 0 { 10000 } else { 100 };
1004            for i in 0..iterations {
1005                sum = sum.wrapping_add(i);
1006            }
1007            sum
1008        });
1009
1010        assert_eq!(results.len(), 50);
1011    }
1012
1013    #[test]
1014    fn test_work_stealing_num_workers() {
1015        let ws_default = WorkStealingMonteCarlo::new();
1016        assert!(ws_default.num_workers() > 0);
1017
1018        let ws_custom = WorkStealingMonteCarlo::with_workers(8);
1019        assert_eq!(ws_custom.num_workers(), 8);
1020    }
1021
1022    // === Additional Coverage Tests ===
1023
1024    #[test]
1025    fn test_mc_result_relative_error() {
1026        let result = MonteCarloResult::new(0.5, 0.01, 10000);
1027        let rel_err = result.relative_error();
1028        // relative_error = std_error / |estimate| = 0.01 / 0.5 = 0.02
1029        assert!((rel_err - 0.02).abs() < f64::EPSILON);
1030    }
1031
1032    #[test]
1033    fn test_mc_result_relative_error_zero_estimate() {
1034        let result = MonteCarloResult::new(0.0, 0.01, 10000);
1035        let rel_err = result.relative_error();
1036        // When estimate is zero, relative_error = std_error
1037        assert!((rel_err - 0.01).abs() < f64::EPSILON);
1038    }
1039
1040    #[test]
1041    fn test_mc_result_with_variance_reduction() {
1042        let result = MonteCarloResult::new(0.5, 0.01, 10000).with_variance_reduction(2.0);
1043        assert!(result.variance_reduction_factor.is_some());
1044        assert!((result.variance_reduction_factor.unwrap() - 2.0).abs() < f64::EPSILON);
1045    }
1046
1047    #[test]
1048    fn test_mc_result_clone() {
1049        let result = MonteCarloResult::new(0.5, 0.01, 10000);
1050        let cloned = result.clone();
1051        assert!((cloned.estimate - result.estimate).abs() < f64::EPSILON);
1052        assert_eq!(cloned.samples, result.samples);
1053    }
1054
1055    #[test]
1056    fn test_mc_result_debug() {
1057        let result = MonteCarloResult::new(0.5, 0.01, 10000);
1058        let debug = format!("{:?}", result);
1059        assert!(debug.contains("MonteCarloResult"));
1060        assert!(debug.contains("estimate"));
1061    }
1062
1063    #[test]
1064    fn test_variance_reduction_default() {
1065        let vr = VarianceReduction::default();
1066        match vr {
1067            VarianceReduction::None => {} // Expected
1068            _ => panic!("Default should be None"),
1069        }
1070    }
1071
1072    #[test]
1073    fn test_variance_reduction_clone() {
1074        let vr = VarianceReduction::Antithetic;
1075        let cloned = vr.clone();
1076        match cloned {
1077            VarianceReduction::Antithetic => {} // Expected
1078            _ => panic!("Clone should preserve variant"),
1079        }
1080    }
1081
1082    #[test]
1083    fn test_variance_reduction_debug() {
1084        let vr = VarianceReduction::Antithetic;
1085        let debug = format!("{:?}", vr);
1086        assert!(debug.contains("Antithetic"));
1087
1088        let vr = VarianceReduction::Stratified { num_strata: 10 };
1089        let debug = format!("{:?}", vr);
1090        assert!(debug.contains("Stratified"));
1091    }
1092
1093    #[test]
1094    fn test_mc_engine_debug() {
1095        let engine = MonteCarloEngine::with_samples(1000);
1096        let debug = format!("{:?}", engine);
1097        assert!(debug.contains("MonteCarloEngine"));
1098    }
1099
1100    #[test]
1101    fn test_control_variate() {
1102        // Test control variate with a simple known case
1103        // Estimate E[x^2] using x as control variate
1104        // E[x] = 0.5, E[x^2] = 1/3
1105
1106        fn control_fn(x: f64) -> f64 {
1107            x
1108        }
1109        let control_expectation = 0.5;
1110
1111        let engine = MonteCarloEngine::new(
1112            100_000,
1113            VarianceReduction::ControlVariate {
1114                control_fn,
1115                expectation: control_expectation,
1116            },
1117        );
1118        let mut rng = SimRng::new(42);
1119
1120        let result = engine.run(|x| x * x, &mut rng);
1121        // E[x^2] = 1/3
1122        assert!(
1123            (result.estimate - 1.0 / 3.0).abs() < 0.01,
1124            "Expected ~0.333, got {}",
1125            result.estimate
1126        );
1127    }
1128
1129    #[test]
1130    fn test_simulation_task_debug_clone() {
1131        let task = SimulationTask {
1132            index: 42,
1133            seed: 12345,
1134        };
1135        let cloned = task.clone();
1136        assert_eq!(cloned.index, 42);
1137        assert_eq!(cloned.seed, 12345);
1138
1139        let debug = format!("{:?}", task);
1140        assert!(debug.contains("SimulationTask"));
1141    }
1142
1143    #[test]
1144    fn test_work_stealing_debug() {
1145        let ws = WorkStealingMonteCarlo::with_workers(2);
1146        let debug = format!("{:?}", ws);
1147        assert!(debug.contains("WorkStealingMonteCarlo"));
1148    }
1149}
1150
#[cfg(test)]
mod proptests {
    use super::*;
    use proptest::prelude::*;

    proptest! {
        /// Falsification: the MC estimate should land inside its own
        /// confidence band with overwhelming probability.
        #[test]
        fn prop_mc_confidence_interval(seed in 0u64..10000) {
            let mut rng = SimRng::new(seed);
            let engine = MonteCarloEngine::with_samples(100_000); // More samples

            // Known integral: ∫₀¹ x dx = 0.5.
            let result = engine.run(|x| x, &mut rng);
            let deviation = (result.estimate - 0.5).abs();

            // A 5-sigma band (99.99994% coverage) keeps the test very lenient.
            prop_assert!(deviation < 5.0 * result.std_error,
                "Error {} exceeds 5 sigma = {}", deviation, 5.0 * result.std_error);
        }

        /// Falsification: more samples should not produce a (much) larger
        /// standard error.
        #[test]
        fn prop_mc_error_decreases(seed in 0u64..1000, n_small in 100usize..1000) {
            // Independent RNG streams for the two runs.
            let mut rng_small = SimRng::new(seed);
            let mut rng_large = SimRng::new(seed + 1);

            let small = MonteCarloEngine::with_samples(n_small)
                .run(|x| x * x, &mut rng_small);
            let large = MonteCarloEngine::with_samples(n_small * 10)
                .run(|x| x * x, &mut rng_large);

            // Randomness means single draws need not be strictly monotone,
            // so only require the bigger run's error to stay under 2× the
            // smaller run's.
            prop_assert!(large.std_error < small.std_error * 2.0,
                "Large std_error {} should be less than small {} * 2",
                large.std_error, small.std_error);
        }
    }
}