// Source file: scirs2_core/random/ecosystem_integration.rs
//! Ecosystem integration utilities for seamless SCIRS2 module interoperability
//!
//! This module provides bridge functions, adapters, and trait implementations that enable
//! the random number generation system to integrate seamlessly with other SCIRS2 modules
//! including scirs2-linalg, scirs2-stats, scirs2-neural, scirs2-optimize, and more.
//!
//! # Design Philosophy
//!
//! - **Zero-copy**: Minimize data copying between modules
//! - **Type-safe**: Compile-time guarantees for cross-module operations
//! - **Performance**: Optimized for high-throughput scientific computing
//! - **Ergonomic**: Simple, intuitive APIs for common workflows
//!
//! # Integration Patterns
//!
//! 1. **Random Matrix Generation**: For linear algebra operations
//! 2. **Statistical Distribution Sampling**: For statistical analysis
//! 3. **Neural Network Initialization**: For deep learning workflows
//! 4. **Optimization Noise**: For stochastic optimization algorithms
//! 5. **Scientific Simulation**: For Monte Carlo and sampling methods
//!
//! # Examples
//!
//! ```rust
//! use scirs2_core::random::ecosystem_integration::*;
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Linear algebra integration
//! let random_matrix = LinalgBridge::random_symmetric_matrix(10, 42)?; // Smaller for doc test
//! let eigenvalues = LinalgBridge::random_eigenvalue_problem(5, 1.0, 123)?; // eigenvalue_spread > 0.1
//!
//! // Statistical analysis integration
//! let experiment = StatsBridge::design_experiment()
//!     .factors(&[vec![1.0, 2.0, 3.0], vec![0.1, 0.2, 0.3]])
//!     .replications(3) // Smaller for doc test
//!     .randomization_seed(42)
//!     .build()?;
//!
//! // Neural network initialization
//! let weights = NeuralBridge::xavier_initialization(&[10, 5, 2], 42)?; // Smaller for doc test
//! let gradients = NeuralBridge::gradient_noise_injection(0.01, &weights, 123)?;
//! # Ok(())
//! # }
//! ```

use std::collections::HashMap;

use ::ndarray::{Array1, Array2, Array3, ArrayD, Dimension, Ix2};
use rand::{Rng, RngExt};
use rand_distr::{Distribution, Normal, Uniform};

use crate::random::{
    advanced_numerical::*,
    arrays::*,
    core::{seeded_rng, Random},
    distributions::*,
    parallel::{ParallelRng, ThreadLocalRngPool},
    scientific::*,
};

/// Bridge for linear algebra operations requiring random number generation.
///
/// Stateless namespace type: all functionality is exposed through associated
/// functions that take an explicit `seed` for reproducibility.
pub struct LinalgBridge;

62impl LinalgBridge {
63    /// Generate a random symmetric positive definite matrix
64    pub fn random_symmetric_positive_definite(
65        size: usize,
66        seed: u64,
67    ) -> Result<Array2<f64>, String> {
68        let mut rng = seeded_rng(seed);
69
70        // Generate random matrix A
71        let mut a = Array2::zeros((size, size));
72        for i in 0..size {
73            for j in 0..size {
74                a[[i, j]] = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
75            }
76        }
77
78        // Compute A * A^T to ensure positive definiteness
79        let at = a.t();
80        let mut result = Array2::zeros((size, size));
81
82        for i in 0..size {
83            for j in 0..size {
84                let mut sum = 0.0;
85                for k in 0..size {
86                    sum += a[[i, k]] * at[[k, j]];
87                }
88                result[[i, j]] = sum;
89            }
90            // Add small diagonal regularization
91            result[[i, i]] += 1e-6;
92        }
93
94        Ok(result)
95    }
96
97    /// Generate random symmetric matrix
98    pub fn random_symmetric_matrix(size: usize, seed: u64) -> Result<Array2<f64>, String> {
99        let mut rng = seeded_rng(seed);
100        let mut matrix = Array2::zeros((size, size));
101
102        for i in 0..size {
103            for j in i..size {
104                let value = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
105                matrix[[i, j]] = value;
106                matrix[[j, i]] = value;
107            }
108        }
109
110        Ok(matrix)
111    }
112
113    /// Generate random orthogonal matrix using QR decomposition
114    pub fn random_orthogonal_matrix(size: usize, seed: u64) -> Result<Array2<f64>, String> {
115        let mut rng = seeded_rng(seed);
116
117        // Generate random matrix
118        let mut a = Array2::zeros((size, size));
119        for i in 0..size {
120            for j in 0..size {
121                a[[i, j]] = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
122            }
123        }
124
125        // Gram-Schmidt orthogonalization
126        let mut q = Array2::zeros((size, size));
127
128        for j in 0..size {
129            // Copy column j from A
130            let mut v = Array1::zeros(size);
131            for i in 0..size {
132                v[i] = a[[i, j]];
133            }
134
135            // Subtract projections onto previous columns
136            for k in 0..j {
137                let mut proj = 0.0;
138                for i in 0..size {
139                    proj += v[i] * q[[i, k]];
140                }
141                for i in 0..size {
142                    v[i] -= proj * q[[i, k]];
143                }
144            }
145
146            // Normalize
147            let norm = (v.iter().map(|x| x * x).sum::<f64>()).sqrt();
148            if norm > 1e-10 {
149                for i in 0..size {
150                    q[[i, j]] = v[i] / norm;
151                }
152            }
153        }
154
155        Ok(q)
156    }
157
158    /// Generate random eigenvalue problem (A, eigenvalues, eigenvectors)
159    pub fn random_eigenvalue_problem(
160        size: usize,
161        eigenvalue_spread: f64,
162        seed: u64,
163    ) -> Result<(Array2<f64>, Vec<f64>, Array2<f64>), String> {
164        let mut rng = seeded_rng(seed);
165
166        // Generate random eigenvalues
167        let mut eigenvalues = Vec::with_capacity(size);
168        for _ in 0..size {
169            eigenvalues
170                .push(rng.sample(Uniform::new(0.1, eigenvalue_spread).expect("Operation failed")));
171        }
172        eigenvalues.sort_by(|a, b| b.partial_cmp(a).expect("Operation failed")); // Sort descending
173
174        // Generate random orthogonal eigenvector matrix
175        let eigenvectors = Self::random_orthogonal_matrix(size, seed + 1)?;
176
177        // Construct matrix A = V * D * V^T
178        let mut diagonal = Array2::zeros((size, size));
179        for i in 0..size {
180            diagonal[[i, i]] = eigenvalues[i];
181        }
182
183        // A = V * D * V^T
184        let mut vd = Array2::zeros((size, size));
185        for i in 0..size {
186            for j in 0..size {
187                let mut sum = 0.0;
188                for k in 0..size {
189                    sum += eigenvectors[[i, k]] * diagonal[[k, j]];
190                }
191                vd[[i, j]] = sum;
192            }
193        }
194
195        let mut a = Array2::zeros((size, size));
196        for i in 0..size {
197            for j in 0..size {
198                let mut sum = 0.0;
199                for k in 0..size {
200                    sum += vd[[i, k]] * eigenvectors[[j, k]]; // V^T = V transpose
201                }
202                a[[i, j]] = sum;
203            }
204        }
205
206        Ok((a, eigenvalues, eigenvectors))
207    }
208
209    /// Generate random sparse matrix with controlled sparsity
210    pub fn random_sparse_matrix(
211        rows: usize,
212        cols: usize,
213        density: f64,
214        seed: u64,
215    ) -> Result<Vec<(usize, usize, f64)>, String> {
216        if !(0.0..=1.0).contains(&density) {
217            return Err("Density must be between 0 and 1".to_string());
218        }
219
220        let mut rng = seeded_rng(seed);
221        let mut triplets = Vec::new();
222
223        let total_elements = rows * cols;
224        let nnz = (total_elements as f64 * density) as usize;
225
226        for _ in 0..nnz {
227            let row = rng.sample(Uniform::new(0, rows).expect("Operation failed"));
228            let col = rng.sample(Uniform::new(0, cols).expect("Operation failed"));
229            let value = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
230            triplets.push((row, col, value));
231        }
232
233        Ok(triplets)
234    }
235}
236
/// Bridge for statistical analysis operations.
///
/// Stateless namespace type: all functionality is exposed through associated
/// functions that take an explicit `seed` for reproducibility.
pub struct StatsBridge;

240impl StatsBridge {
241    /// Create a comprehensive experimental design
242    pub fn design_experiment() -> ExperimentDesignBuilder {
243        ExperimentDesignBuilder::new()
244    }
245
246    /// Generate synthetic dataset with known statistical properties
247    pub fn synthetic_dataset(
248        properties: DatasetProperties,
249        seed: u64,
250    ) -> Result<SyntheticDataset, String> {
251        let mut rng = seeded_rng(seed);
252        let mut data = HashMap::new();
253
254        // Generate features according to specifications
255        for (name, spec) in properties.features.iter() {
256            let feature_data = match &spec.distribution {
257                FeatureDistribution::Normal { mean, std } => {
258                    let normal = Normal::new(*mean, *std).expect("Operation failed");
259                    (0..properties.n_samples)
260                        .map(|_| rng.sample(normal))
261                        .collect::<Vec<f64>>()
262                }
263                FeatureDistribution::Uniform { low, high } => {
264                    let uniform = Uniform::new(*low, *high).expect("Operation failed");
265                    (0..properties.n_samples)
266                        .map(|_| rng.sample(uniform))
267                        .collect::<Vec<f64>>()
268                }
269                FeatureDistribution::Beta { alpha, beta } => {
270                    let beta_dist = Beta::new(*alpha, *beta)?;
271                    (0..properties.n_samples)
272                        .map(|_| beta_dist.sample(&mut rng))
273                        .collect::<Vec<f64>>()
274                }
275                FeatureDistribution::Categorical {
276                    categories,
277                    weights,
278                } => {
279                    let categorical = Categorical::new(weights.clone())?;
280                    (0..properties.n_samples)
281                        .map(|_| categorical.sample(&mut rng) as f64)
282                        .collect::<Vec<f64>>()
283                }
284            };
285            data.insert(name.clone(), feature_data);
286        }
287
288        Ok(SyntheticDataset { data, properties })
289    }
290
291    /// Generate correlated multivariate dataset
292    pub fn correlated_dataset(
293        means: Vec<f64>,
294        correlation_matrix: Vec<Vec<f64>>,
295        n_samples: usize,
296        seed: u64,
297    ) -> Result<Array2<f64>, String> {
298        let mvn = MultivariateNormal::new(means, correlation_matrix)?;
299        let mut rng = seeded_rng(seed);
300
301        let mut samples = Array2::zeros((n_samples, mvn.dimension()));
302        for i in 0..n_samples {
303            let sample = mvn.sample(&mut rng);
304            for j in 0..sample.len() {
305                samples[[i, j]] = sample[j];
306            }
307        }
308
309        Ok(samples)
310    }
311
312    /// Bootstrap confidence intervals
313    pub fn bootstrap_confidence_interval<F>(
314        data: &[f64],
315        statistic: F,
316        confidence_level: f64,
317        n_bootstrap: usize,
318        seed: u64,
319    ) -> Result<(f64, f64, f64), String>
320    where
321        F: Fn(&[f64]) -> f64 + Send + Sync,
322    {
323        let _pool = ThreadLocalRngPool::new(seed);
324
325        // Each bootstrap iteration uses a unique seed derived from base seed + iteration index.
326        // Using the same seed for every iteration would generate identical resamples, producing
327        // degenerate confidence intervals.
328        let bootstrap_stats: Vec<f64> = (0..n_bootstrap)
329            .map(|iter_idx| {
330                let mut rng = seeded_rng(seed.wrapping_add(iter_idx as u64));
331                use crate::random::slice_ops::ScientificSliceRandom;
332                let bootstrap_sample =
333                    data.scientific_sample_with_replacement(&mut rng, data.len());
334                let sample_values: Vec<f64> = bootstrap_sample.iter().map(|&&x| x).collect();
335                statistic(&sample_values)
336            })
337            .collect();
338
339        let mut sorted_stats = bootstrap_stats;
340        sorted_stats.sort_by(|a, b| a.partial_cmp(b).expect("Operation failed"));
341
342        let alpha = 1.0 - confidence_level;
343        let lower_idx = (alpha / 2.0 * sorted_stats.len() as f64) as usize;
344        let upper_idx = ((1.0 - alpha / 2.0) * sorted_stats.len() as f64) as usize;
345
346        let lower_bound = sorted_stats[lower_idx];
347        let upper_bound = sorted_stats[upper_idx.min(sorted_stats.len() - 1)];
348        let point_estimate = statistic(data);
349
350        Ok((point_estimate, lower_bound, upper_bound))
351    }
352}
353
/// Bridge for neural network operations.
///
/// Stateless namespace type: all functionality is exposed through associated
/// functions that take an explicit `seed` for reproducibility.
pub struct NeuralBridge;

357impl NeuralBridge {
358    /// Xavier/Glorot weight initialization
359    pub fn xavier_initialization(
360        layer_sizes: &[usize],
361        seed: u64,
362    ) -> Result<Vec<Array2<f64>>, String> {
363        let mut rng = seeded_rng(seed);
364        let mut weights = Vec::new();
365
366        for i in 0..layer_sizes.len() - 1 {
367            let fan_in = layer_sizes[i] as f64;
368            let fan_out = layer_sizes[i + 1] as f64;
369            let std = (2.0 / (fan_in + fan_out)).sqrt();
370
371            let normal = Normal::new(0.0, std).expect("Operation failed");
372            let mut weight_matrix = Array2::zeros((layer_sizes[i + 1], layer_sizes[i]));
373
374            for j in 0..layer_sizes[i + 1] {
375                for k in 0..layer_sizes[i] {
376                    weight_matrix[[j, k]] = rng.sample(normal);
377                }
378            }
379
380            weights.push(weight_matrix);
381        }
382
383        Ok(weights)
384    }
385
386    /// He/Kaiming weight initialization for ReLU networks
387    pub fn he_initialization(layer_sizes: &[usize], seed: u64) -> Result<Vec<Array2<f64>>, String> {
388        let mut rng = seeded_rng(seed);
389        let mut weights = Vec::new();
390
391        for i in 0..layer_sizes.len() - 1 {
392            let fan_in = layer_sizes[i] as f64;
393            let std = (2.0 / fan_in).sqrt();
394
395            let normal = Normal::new(0.0, std).expect("Operation failed");
396            let mut weight_matrix = Array2::zeros((layer_sizes[i + 1], layer_sizes[i]));
397
398            for j in 0..layer_sizes[i + 1] {
399                for k in 0..layer_sizes[i] {
400                    weight_matrix[[j, k]] = rng.sample(normal);
401                }
402            }
403
404            weights.push(weight_matrix);
405        }
406
407        Ok(weights)
408    }
409
410    /// LeCun weight initialization for SELU networks
411    pub fn lecun_initialization(
412        layer_sizes: &[usize],
413        seed: u64,
414    ) -> Result<Vec<Array2<f64>>, String> {
415        let mut rng = seeded_rng(seed);
416        let mut weights = Vec::new();
417
418        for i in 0..layer_sizes.len() - 1 {
419            let fan_in = layer_sizes[i] as f64;
420            let std = (1.0 / fan_in).sqrt();
421
422            let normal = Normal::new(0.0, std).expect("Operation failed");
423            let mut weight_matrix = Array2::zeros((layer_sizes[i + 1], layer_sizes[i]));
424
425            for j in 0..layer_sizes[i + 1] {
426                for k in 0..layer_sizes[i] {
427                    weight_matrix[[j, k]] = rng.sample(normal);
428                }
429            }
430
431            weights.push(weight_matrix);
432        }
433
434        Ok(weights)
435    }
436
437    /// Generate random dropout masks
438    pub fn dropout_masks(
439        shapes: &[(usize, usize)],
440        dropout_rate: f64,
441        seed: u64,
442    ) -> Result<Vec<Array2<f64>>, String> {
443        let mut rng = seeded_rng(seed);
444        let mut masks = Vec::new();
445
446        let keep_prob = 1.0 - dropout_rate;
447        let uniform = Uniform::new(0.0, 1.0).expect("Operation failed");
448
449        for &(rows, cols) in shapes {
450            let mut mask = Array2::zeros((rows, cols));
451            for i in 0..rows {
452                for j in 0..cols {
453                    mask[[i, j]] = if rng.sample(uniform) < keep_prob {
454                        1.0 / keep_prob
455                    } else {
456                        0.0
457                    };
458                }
459            }
460            masks.push(mask);
461        }
462
463        Ok(masks)
464    }
465
466    /// Gradient noise injection for improved generalization
467    pub fn gradient_noise_injection(
468        noise_scale: f64,
469        gradients: &[Array2<f64>],
470        seed: u64,
471    ) -> Result<Vec<Array2<f64>>, String> {
472        let mut rng = seeded_rng(seed);
473        let mut noisy_gradients = Vec::new();
474
475        let normal = Normal::new(0.0, noise_scale).expect("Operation failed");
476
477        for gradient in gradients {
478            let mut noisy_gradient = gradient.clone();
479            for elem in noisy_gradient.iter_mut() {
480                *elem += rng.sample(normal);
481            }
482            noisy_gradients.push(noisy_gradient);
483        }
484
485        Ok(noisy_gradients)
486    }
487
488    /// Generate random augmentation parameters
489    pub fn augmentation_parameters(
490        batch_size: usize,
491        config: AugmentationConfig,
492        seed: u64,
493    ) -> AugmentationBatch {
494        let mut rng = seeded_rng(seed);
495        let mut batch = AugmentationBatch::new(batch_size);
496
497        for _ in 0..batch_size {
498            let rotation = if config.rotation_range > 0.0 {
499                rng.sample(
500                    Uniform::new(-config.rotation_range, config.rotation_range)
501                        .expect("Operation failed"),
502                )
503            } else {
504                0.0
505            };
506
507            let scale = if config.scale_range.0 < config.scale_range.1 {
508                rng.sample(
509                    Uniform::new(config.scale_range.0, config.scale_range.1)
510                        .expect("Operation failed"),
511                )
512            } else {
513                1.0
514            };
515
516            let translation_x = if config.translation_range.0 > 0.0 {
517                rng.sample(
518                    Uniform::new(-config.translation_range.0, config.translation_range.0)
519                        .expect("Operation failed"),
520                )
521            } else {
522                0.0
523            };
524
525            let translation_y = if config.translation_range.1 > 0.0 {
526                rng.sample(
527                    Uniform::new(-config.translation_range.1, config.translation_range.1)
528                        .expect("Operation failed"),
529                )
530            } else {
531                0.0
532            };
533
534            batch.add_transform(AugmentationTransform {
535                rotation,
536                scale,
537                translation: (translation_x, translation_y),
538                horizontal_flip: rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed"))
539                    < config.horizontal_flip_prob,
540                vertical_flip: rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed"))
541                    < config.vertical_flip_prob,
542            });
543        }
544
545        batch
546    }
547}
548
/// Bridge for optimization algorithms requiring randomness.
///
/// Stateless namespace type: all functionality is exposed through associated
/// functions that take an explicit `seed` for reproducibility.
pub struct OptimizationBridge;

552impl OptimizationBridge {
553    /// Generate random initial population for genetic algorithms
554    pub fn genetic_algorithm_population<T>(
555        population_size: usize,
556        individual_generator: impl Fn(&mut Random<rand::rngs::StdRng>) -> T,
557        seed: u64,
558    ) -> Vec<T> {
559        let mut rng = seeded_rng(seed);
560        (0..population_size)
561            .map(|_| individual_generator(&mut rng))
562            .collect()
563    }
564
565    /// Generate random perturbations for simulated annealing
566    pub fn simulated_annealing_perturbation(
567        current_state: &[f64],
568        temperature: f64,
569        perturbation_scale: f64,
570        seed: u64,
571    ) -> Vec<f64> {
572        let mut rng = seeded_rng(seed);
573        let std = perturbation_scale * temperature.sqrt();
574        let normal = Normal::new(0.0, std).expect("Operation failed");
575
576        current_state
577            .iter()
578            .map(|&x| x + rng.sample(normal))
579            .collect()
580    }
581
582    /// Generate random directions for coordinate descent
583    pub fn random_coordinate_directions(
584        dimensions: usize,
585        n_directions: usize,
586        seed: u64,
587    ) -> Array2<f64> {
588        let mut rng = seeded_rng(seed);
589        let mut directions = Array2::zeros((n_directions, dimensions));
590
591        for i in 0..n_directions {
592            // Generate random unit vector
593            let mut direction = vec![0.0; dimensions];
594            for j in 0..dimensions {
595                direction[j] = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
596            }
597
598            // Normalize
599            let norm = direction.iter().map(|x| x * x).sum::<f64>().sqrt();
600            for j in 0..dimensions {
601                directions[[i, j]] = direction[j] / norm;
602            }
603        }
604
605        directions
606    }
607
608    /// Generate noise for parameter exploration in reinforcement learning
609    pub fn exploration_noise(
610        action_dimensions: usize,
611        noise_type: ExplorationNoiseType,
612        time_step: usize,
613        seed: u64,
614    ) -> Vec<f64> {
615        let mut rng = seeded_rng(seed + time_step as u64);
616
617        match noise_type {
618            ExplorationNoiseType::Gaussian { std } => {
619                let normal = Normal::new(0.0, std).expect("Operation failed");
620                (0..action_dimensions).map(|_| rng.sample(normal)).collect()
621            }
622            ExplorationNoiseType::OrnsteinUhlenbeck { theta, sigma, mu } => {
623                // Simplified OU process (would need state for full implementation)
624                let dt = 1.0;
625                let std = sigma * (2.0 * theta * dt).sqrt();
626                let normal = Normal::new(0.0, std).expect("Operation failed");
627                (0..action_dimensions)
628                    .map(|_| mu + rng.sample(normal))
629                    .collect()
630            }
631            ExplorationNoiseType::EpsilonGreedy { epsilon } => {
632                let uniform = Uniform::new(0.0, 1.0).expect("Operation failed");
633                (0..action_dimensions)
634                    .map(|_| {
635                        if rng.sample(uniform) < epsilon {
636                            1.0
637                        } else {
638                            0.0
639                        }
640                    })
641                    .collect()
642            }
643        }
644    }
645}
646
// ---------------------------------------------------------------------------
// Supporting types and builders used by the bridges above
// ---------------------------------------------------------------------------

/// Fluent builder for randomized experimental designs
/// (entry point: [`StatsBridge::design_experiment`]).
#[derive(Debug, Clone)]
pub struct ExperimentDesignBuilder {
    factors: Vec<Vec<f64>>,          // candidate levels, one inner Vec per factor
    replications: usize,             // replications of each design point
    blocking_factors: Vec<String>,   // NOTE(review): never read by `build` yet
    randomization_seed: Option<u64>, // None -> `build` falls back to a default seed
    design_type: DesignType,         // which layout strategy `build` uses
}

/// Strategy used by [`ExperimentDesignBuilder::build`] to lay out design points.
#[derive(Debug, Clone)]
pub enum DesignType {
    /// Every combination of factor levels.
    FullFactorial,
    /// Seeded random fraction of the full factorial.
    FractionalFactorial { fraction: f64 },
    /// Central composite design; `alpha` is forwarded to the scientific module.
    CentralComposite { alpha: f64 },
    /// Not yet implemented; `build` returns an error for this variant.
    LatinHypercube,
    /// `n_points` points, each picking one random level per factor.
    RandomSampling { n_points: usize },
}

667impl ExperimentDesignBuilder {
668    pub fn new() -> Self {
669        Self {
670            factors: Vec::new(),
671            replications: 1,
672            blocking_factors: Vec::new(),
673            randomization_seed: None,
674            design_type: DesignType::FullFactorial,
675        }
676    }
677
678    pub fn factors(mut self, factors: &[Vec<f64>]) -> Self {
679        self.factors = factors.to_vec();
680        self
681    }
682
683    pub fn replications(mut self, n: usize) -> Self {
684        self.replications = n;
685        self
686    }
687
688    pub fn randomization_seed(mut self, seed: u64) -> Self {
689        self.randomization_seed = Some(seed);
690        self
691    }
692
693    pub fn design_type(mut self, design: DesignType) -> Self {
694        self.design_type = design;
695        self
696    }
697
698    pub fn build(self) -> Result<ExperimentalDesign, String> {
699        let seed = self.randomization_seed.unwrap_or(42);
700
701        let design_points = match self.design_type {
702            DesignType::FullFactorial => {
703                crate::random::scientific::ExperimentalDesign::factorial_design(&self.factors)
704            }
705            DesignType::FractionalFactorial { fraction } => {
706                crate::random::scientific::ExperimentalDesign::fractional_factorial_design(
707                    &self.factors,
708                    fraction,
709                    seed,
710                )
711            }
712            DesignType::CentralComposite { alpha } => {
713                crate::random::scientific::ExperimentalDesign::central_composite_design(
714                    self.factors.len(),
715                    alpha,
716                )
717            }
718            DesignType::LatinHypercube => {
719                // Would integrate with QMC module
720                return Err("Latin Hypercube design not yet implemented".to_string());
721            }
722            DesignType::RandomSampling { n_points } => {
723                let mut rng = seeded_rng(seed);
724                let mut points = Vec::new();
725                for _ in 0..n_points {
726                    let mut point = Vec::new();
727                    for factor in &self.factors {
728                        let idx =
729                            rng.sample(Uniform::new(0, factor.len()).expect("Operation failed"));
730                        point.push(factor[idx]);
731                    }
732                    points.push(point);
733                }
734                points
735            }
736        };
737
738        Ok(ExperimentalDesign {
739            design_points,
740            replications: self.replications,
741            factor_names: (0..self.factors.len())
742                .map(|i| format!("Factor_{}", i))
743                .collect(),
744        })
745    }
746}
747
/// Materialized design produced by [`ExperimentDesignBuilder::build`].
#[derive(Debug, Clone)]
pub struct ExperimentalDesign {
    /// One row per design point; entries are the chosen factor levels.
    pub design_points: Vec<Vec<f64>>,
    /// Number of replications of each design point.
    pub replications: usize,
    /// Auto-generated names ("Factor_0", "Factor_1", ...).
    pub factor_names: Vec<String>,
}

/// Specification consumed by [`StatsBridge::synthetic_dataset`].
#[derive(Debug, Clone)]
pub struct DatasetProperties {
    /// Number of samples drawn per feature.
    pub n_samples: usize,
    /// Per-feature sampling specification, keyed by feature name.
    pub features: HashMap<String, FeatureSpec>,
}

/// Distribution (and intended correlation) of one synthetic feature.
#[derive(Debug, Clone)]
pub struct FeatureSpec {
    /// Marginal distribution to sample values from.
    pub distribution: FeatureDistribution,
    /// Name of the feature this one should correlate with, if any.
    /// NOTE(review): not consumed by `StatsBridge::synthetic_dataset` yet.
    pub correlation_target: Option<String>,
    /// Target correlation strength.
    /// NOTE(review): currently unused as well.
    pub correlation_strength: f64,
}

/// Marginal distribution of a synthetic feature.
#[derive(Debug, Clone)]
pub enum FeatureDistribution {
    /// Gaussian with the given mean and standard deviation.
    Normal {
        mean: f64,
        std: f64,
    },
    /// Uniform on the half-open interval `[low, high)`.
    Uniform {
        low: f64,
        high: f64,
    },
    /// Beta distribution with shape parameters `alpha` and `beta`.
    Beta {
        alpha: f64,
        beta: f64,
    },
    /// Weighted categorical draw; samples are emitted as `f64` indices.
    /// NOTE(review): the `categories` labels are not used by
    /// `StatsBridge::synthetic_dataset`, which returns raw sampled indices.
    Categorical {
        categories: Vec<usize>,
        weights: Vec<f64>,
    },
}

/// Output of [`StatsBridge::synthetic_dataset`]: sampled values plus the
/// specification that produced them.
#[derive(Debug, Clone)]
pub struct SyntheticDataset {
    /// Feature name -> sampled values (length = `properties.n_samples`).
    pub data: HashMap<String, Vec<f64>>,
    /// The specification the data was generated from.
    pub properties: DatasetProperties,
}

/// Sampling ranges for random data-augmentation parameters
/// (see [`NeuralBridge::augmentation_parameters`]).
#[derive(Debug, Clone)]
pub struct AugmentationConfig {
    /// Rotation drawn from `[-rotation_range, rotation_range)`; 0 disables rotation.
    pub rotation_range: f64,
    /// Scale drawn from `[scale_range.0, scale_range.1)`; an empty interval
    /// yields a fixed scale of 1.
    pub scale_range: (f64, f64),
    /// Per-axis translation drawn from `[-range, range)`; 0 disables that axis.
    pub translation_range: (f64, f64),
    /// Probability of a horizontal flip.
    pub horizontal_flip_prob: f64,
    /// Probability of a vertical flip.
    pub vertical_flip_prob: f64,
}

/// One sampled augmentation: the concrete transform applied to a single sample.
#[derive(Debug, Clone)]
pub struct AugmentationTransform {
    /// Rotation amount (units follow `AugmentationConfig::rotation_range`).
    pub rotation: f64,
    /// Multiplicative scale factor (1.0 = unchanged).
    pub scale: f64,
    /// (x, y) translation offsets.
    pub translation: (f64, f64),
    /// Whether to mirror horizontally.
    pub horizontal_flip: bool,
    /// Whether to mirror vertically.
    pub vertical_flip: bool,
}

/// A batch of per-sample augmentation transforms.
#[derive(Debug)]
pub struct AugmentationBatch {
    /// One transform per sample, in generation order.
    pub transforms: Vec<AugmentationTransform>,
}

817impl AugmentationBatch {
818    pub fn new(capacity: usize) -> Self {
819        Self {
820            transforms: Vec::with_capacity(capacity),
821        }
822    }
823
824    pub fn add_transform(&mut self, transform: AugmentationTransform) {
825        self.transforms.push(transform);
826    }
827}
828
/// Noise model for [`OptimizationBridge::exploration_noise`].
#[derive(Debug, Clone)]
pub enum ExplorationNoiseType {
    /// Zero-mean Gaussian noise with the given standard deviation.
    Gaussian { std: f64 },
    /// Ornstein-Uhlenbeck-style noise. NOTE(review): the current
    /// implementation is stateless, so it approximates a single OU step
    /// around `mu` rather than a temporally-correlated process.
    OrnsteinUhlenbeck { theta: f64, sigma: f64, mu: f64 },
    /// Per-dimension indicator: 1.0 ("explore") with probability `epsilon`,
    /// otherwise 0.0 ("exploit").
    EpsilonGreedy { epsilon: f64 },
}

#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_relative_eq;

    // The generated matrix must equal its own transpose exactly (values are
    // mirrored during construction).
    #[test]
    fn test_linalg_bridge_symmetric_matrix() {
        let matrix = LinalgBridge::random_symmetric_matrix(5, 42).expect("Operation failed");

        // Check symmetry
        for i in 0..5 {
            for j in 0..5 {
                assert_relative_eq!(matrix[[i, j]], matrix[[j, i]], epsilon = 1e-10);
            }
        }
    }

    // SPD construction (A * A^T + eps * I) implies positive diagonal and
    // symmetry; this checks both necessary conditions.
    #[test]
    fn test_linalg_bridge_positive_definite() {
        let matrix =
            LinalgBridge::random_symmetric_positive_definite(3, 42).expect("Operation failed");

        // Check that all diagonal elements are positive
        for i in 0..3 {
            assert!(matrix[[i, i]] > 0.0);
        }

        // Check symmetry
        for i in 0..3 {
            for j in 0..3 {
                assert_relative_eq!(matrix[[i, j]], matrix[[j, i]], epsilon = 1e-10);
            }
        }
    }

    // Weight matrices are shaped (fan_out, fan_in) per consecutive layer pair.
    #[test]
    fn test_neural_bridge_xavier_init() {
        let layer_sizes = vec![784, 128, 64, 10];
        let weights =
            NeuralBridge::xavier_initialization(&layer_sizes, 42).expect("Operation failed");

        assert_eq!(weights.len(), 3); // 3 weight matrices
        assert_eq!(weights[0].shape(), [128, 784]);
        assert_eq!(weights[1].shape(), [64, 128]);
        assert_eq!(weights[2].shape(), [10, 64]);
    }

    // The bootstrap CI of the sample mean must bracket the point estimate.
    #[test]
    fn test_stats_bridge_bootstrap_ci() {
        let data = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0];

        let (point_est, lower, upper) = StatsBridge::bootstrap_confidence_interval(
            &data,
            |samples| samples.iter().sum::<f64>() / samples.len() as f64, // Mean
            0.95,
            1000,
            42,
        )
        .expect("Test: operation failed");

        assert_relative_eq!(point_est, 5.5, epsilon = 0.1);
        assert!(lower < point_est);
        assert!(upper > point_est);
    }

    // Population size and per-individual structure are delegated to the
    // caller-provided generator closure.
    #[test]
    fn test_optimization_bridge_genetic_population() {
        let population = OptimizationBridge::genetic_algorithm_population(
            10,
            |rng| {
                (0..5)
                    .map(|_| rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed")))
                    .collect::<Vec<f64>>()
            },
            42,
        );

        assert_eq!(population.len(), 10);
        for individual in &population {
            assert_eq!(individual.len(), 5);
            for &gene in individual {
                assert!((0.0..=1.0).contains(&gene));
            }
        }
    }

    // Default design type is full factorial: 2 levels x 2 levels = 4 points.
    #[test]
    fn test_experiment_design_builder() {
        let design = StatsBridge::design_experiment()
            .factors(&[vec![1.0, 2.0], vec![0.1, 0.2]])
            .replications(3)
            .randomization_seed(42)
            .build()
            .expect("Test: operation failed");

        assert_eq!(design.design_points.len(), 4); // 2x2 factorial
        assert_eq!(design.replications, 3);
        assert_eq!(design.factor_names.len(), 2);
    }
935}