scirs2_core/random/ecosystem_integration.rs

//! Ecosystem integration utilities for seamless SCIRS2 module interoperability
//!
//! This module provides bridge functions, adapters, and trait implementations that enable
//! the random number generation system to integrate seamlessly with other SCIRS2 modules
//! including scirs2-linalg, scirs2-stats, scirs2-neural, scirs2-optimize, and more.
//!
//! # Design Philosophy
//!
//! - **Zero-copy**: Minimize data copying between modules
//! - **Type-safe**: Compile-time guarantees for cross-module operations
//! - **Performance**: Optimized for high-throughput scientific computing
//! - **Ergonomic**: Simple, intuitive APIs for common workflows
//!
//! # Integration Patterns
//!
//! 1. **Random Matrix Generation**: For linear algebra operations
//! 2. **Statistical Distribution Sampling**: For statistical analysis
//! 3. **Neural Network Initialization**: For deep learning workflows
//! 4. **Optimization Noise**: For stochastic optimization algorithms
//! 5. **Scientific Simulation**: For Monte Carlo and sampling methods
//!
//! # Examples
//!
//! ```rust
//! use scirs2_core::random::ecosystem_integration::*;
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Linear algebra integration
//! let random_matrix = LinalgBridge::random_symmetric_matrix(10, 42)?; // Smaller for doc test
//! let (matrix, eigenvalues, eigenvectors) =
//!     LinalgBridge::random_eigenvalue_problem(5, 1.0, 123)?; // eigenvalue_spread > 0.1
//!
//! // Statistical analysis integration
//! let experiment = StatsBridge::design_experiment()
//!     .factors(&[vec![1.0, 2.0, 3.0], vec![0.1, 0.2, 0.3]])
//!     .replications(3) // Smaller for doc test
//!     .randomization_seed(42)
//!     .build()?;
//!
//! // Neural network initialization
//! let weights = NeuralBridge::xavier_initialization(&[10, 5, 2], 42)?; // Smaller for doc test
//! let gradients = NeuralBridge::gradient_noise_injection(0.01, &weights, 123)?;
//! # Ok(())
//! # }
//! ```

use crate::random::{
    advanced_numerical::*,
    arrays::*,
    core::{seeded_rng, Random},
    distributions::*,
    scientific::*,
};
use ::ndarray::{Array1, Array2};
use rand::Rng;
use rand_distr::{Distribution, Normal, Uniform};
use std::collections::HashMap;

/// Bridge for linear algebra operations requiring random number generation
pub struct LinalgBridge;

impl LinalgBridge {
    /// Generate a random symmetric positive definite matrix
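    ///
    /// # Example
    ///
    /// A minimal usage sketch (not run as a doctest); the size and seed are
    /// arbitrary. `A * A^T` plus the small diagonal shift is symmetric with
    /// strictly positive diagonal entries.
    ///
    /// ```rust,ignore
    /// let spd = LinalgBridge::random_symmetric_positive_definite(4, 7)?;
    /// assert_eq!(spd.shape(), &[4, 4]);
    /// assert!((0..4).all(|i| spd[[i, i]] > 0.0));
    /// ```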
    pub fn random_symmetric_positive_definite(
        size: usize,
        seed: u64,
    ) -> Result<Array2<f64>, String> {
        let mut rng = seeded_rng(seed);

        // Generate random matrix A
        let mut a = Array2::zeros((size, size));
        for i in 0..size {
            for j in 0..size {
                a[[i, j]] = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
            }
        }

        // Compute A * A^T, which is positive semi-definite; the small diagonal
        // shift below makes the result strictly positive definite
        let at = a.t();
        let mut result = Array2::zeros((size, size));

        for i in 0..size {
            for j in 0..size {
                let mut sum = 0.0;
                for k in 0..size {
                    sum += a[[i, k]] * at[[k, j]];
                }
                result[[i, j]] = sum;
            }
            // Add small diagonal regularization
            result[[i, i]] += 1e-6;
        }

        Ok(result)
    }

    /// Generate a random symmetric matrix
    pub fn random_symmetric_matrix(size: usize, seed: u64) -> Result<Array2<f64>, String> {
        let mut rng = seeded_rng(seed);
        let mut matrix = Array2::zeros((size, size));

        for i in 0..size {
            for j in i..size {
                let value = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
                matrix[[i, j]] = value;
                matrix[[j, i]] = value;
            }
        }

        Ok(matrix)
    }

    /// Generate a random orthogonal matrix via Gram-Schmidt orthogonalization
    /// (the Q factor of a QR decomposition)
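    ///
    /// # Example
    ///
    /// An illustrative check (not run as a doctest) that the columns come out
    /// orthonormal; size and seed are arbitrary:
    ///
    /// ```rust,ignore
    /// let q = LinalgBridge::random_orthogonal_matrix(4, 11)?;
    /// // Q^T * Q should be close to the identity.
    /// let qtq = q.t().dot(&q);
    /// for i in 0..4 {
    ///     assert!((qtq[[i, i]] - 1.0).abs() < 1e-8);
    /// }
    /// ```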
    pub fn random_orthogonal_matrix(size: usize, seed: u64) -> Result<Array2<f64>, String> {
        let mut rng = seeded_rng(seed);

        // Generate random matrix
        let mut a = Array2::zeros((size, size));
        for i in 0..size {
            for j in 0..size {
                a[[i, j]] = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
            }
        }

        // Gram-Schmidt orthogonalization
        let mut q = Array2::zeros((size, size));

        for j in 0..size {
            // Copy column j from A
            let mut v = Array1::zeros(size);
            for i in 0..size {
                v[i] = a[[i, j]];
            }

            // Subtract projections onto previous columns
            for k in 0..j {
                let mut proj = 0.0;
                for i in 0..size {
                    proj += v[i] * q[[i, k]];
                }
                for i in 0..size {
                    v[i] -= proj * q[[i, k]];
                }
            }

            // Normalize
            let norm = (v.iter().map(|x| x * x).sum::<f64>()).sqrt();
            if norm > 1e-10 {
                for i in 0..size {
                    q[[i, j]] = v[i] / norm;
                }
            }
        }

        Ok(q)
    }

    /// Generate a random eigenvalue problem, returning `(A, eigenvalues, eigenvectors)`
    /// with `A = V * D * V^T`
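    ///
    /// # Example
    ///
    /// A sketch of consuming the returned triple (not run as a doctest; values
    /// arbitrary). Note that `eigenvalue_spread` must exceed the 0.1 lower bound
    /// used when sampling the eigenvalues.
    ///
    /// ```rust,ignore
    /// let (a, eigenvalues, eigenvectors) = LinalgBridge::random_eigenvalue_problem(4, 2.0, 9)?;
    /// assert_eq!(a.shape(), &[4, 4]);
    /// assert!(eigenvalues.windows(2).all(|w| w[0] >= w[1])); // sorted descending
    /// ```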
    pub fn random_eigenvalue_problem(
        size: usize,
        eigenvalue_spread: f64,
        seed: u64,
    ) -> Result<(Array2<f64>, Vec<f64>, Array2<f64>), String> {
        if eigenvalue_spread <= 0.1 {
            return Err("eigenvalue_spread must be greater than 0.1".to_string());
        }

        let mut rng = seeded_rng(seed);

        // Generate random eigenvalues in (0.1, eigenvalue_spread)
        let mut eigenvalues = Vec::with_capacity(size);
        for _ in 0..size {
            eigenvalues
                .push(rng.sample(Uniform::new(0.1, eigenvalue_spread).expect("Operation failed")));
        }
        eigenvalues.sort_by(|a, b| b.partial_cmp(a).expect("Operation failed")); // Sort descending

        // Generate random orthogonal eigenvector matrix
        let eigenvectors = Self::random_orthogonal_matrix(size, seed.wrapping_add(1))?;

        // Construct matrix A = V * D * V^T
        let mut diagonal = Array2::zeros((size, size));
        for i in 0..size {
            diagonal[[i, i]] = eigenvalues[i];
        }

        // A = V * D * V^T
        let mut vd = Array2::zeros((size, size));
        for i in 0..size {
            for j in 0..size {
                let mut sum = 0.0;
                for k in 0..size {
                    sum += eigenvectors[[i, k]] * diagonal[[k, j]];
                }
                vd[[i, j]] = sum;
            }
        }

        let mut a = Array2::zeros((size, size));
        for i in 0..size {
            for j in 0..size {
                let mut sum = 0.0;
                for k in 0..size {
                    sum += vd[[i, k]] * eigenvectors[[j, k]]; // V^T = V transpose
                }
                a[[i, j]] = sum;
            }
        }

        Ok((a, eigenvalues, eigenvectors))
    }

    /// Generate a random sparse matrix in COO triplet form with controlled sparsity
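    ///
    /// # Example
    ///
    /// A sketch of the `(row, col, value)` output (not run as a doctest;
    /// dimensions, density, and seed are arbitrary). Duplicate positions are
    /// possible because they are drawn independently.
    ///
    /// ```rust,ignore
    /// let triplets = LinalgBridge::random_sparse_matrix(100, 100, 0.01, 3)?;
    /// assert_eq!(triplets.len(), 100); // ~density * rows * cols entries
    /// assert!(triplets.iter().all(|&(r, c, _)| r < 100 && c < 100));
    /// ```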
    pub fn random_sparse_matrix(
        rows: usize,
        cols: usize,
        density: f64,
        seed: u64,
    ) -> Result<Vec<(usize, usize, f64)>, String> {
        if !(0.0..=1.0).contains(&density) {
            return Err("Density must be between 0 and 1".to_string());
        }

        let mut rng = seeded_rng(seed);
        let mut triplets = Vec::new();

        let total_elements = rows * cols;
        let nnz = (total_elements as f64 * density) as usize;

        // Note: positions are drawn independently, so duplicate (row, col)
        // entries may occur
        for _ in 0..nnz {
            let row = rng.sample(Uniform::new(0, rows).expect("Operation failed"));
            let col = rng.sample(Uniform::new(0, cols).expect("Operation failed"));
            let value = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
            triplets.push((row, col, value));
        }

        Ok(triplets)
    }
}

/// Bridge for statistical analysis operations
pub struct StatsBridge;

impl StatsBridge {
    /// Create a comprehensive experimental design
    pub fn design_experiment() -> ExperimentDesignBuilder {
        ExperimentDesignBuilder::new()
    }

    /// Generate a synthetic dataset with known statistical properties
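    ///
    /// # Example
    ///
    /// A sketch specifying a single normally distributed feature (not run as a
    /// doctest; field values are arbitrary):
    ///
    /// ```rust,ignore
    /// use std::collections::HashMap;
    ///
    /// let mut features = HashMap::new();
    /// features.insert(
    ///     "x".to_string(),
    ///     FeatureSpec {
    ///         distribution: FeatureDistribution::Normal { mean: 0.0, std: 1.0 },
    ///         correlation_target: None,
    ///         correlation_strength: 0.0,
    ///     },
    /// );
    /// let dataset = StatsBridge::synthetic_dataset(
    ///     DatasetProperties { n_samples: 100, features },
    ///     42,
    /// )?;
    /// assert_eq!(dataset.data["x"].len(), 100);
    /// ```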
    pub fn synthetic_dataset(
        properties: DatasetProperties,
        seed: u64,
    ) -> Result<SyntheticDataset, String> {
        let mut rng = seeded_rng(seed);
        let mut data = HashMap::new();

        // HashMap iteration order is unspecified, so sort feature names first to
        // keep the generated dataset deterministic for a fixed seed
        let mut feature_names: Vec<&String> = properties.features.keys().collect();
        feature_names.sort();

        // Generate features according to specifications
        for name in feature_names {
            let spec = &properties.features[name];
            let feature_data = match &spec.distribution {
                FeatureDistribution::Normal { mean, std } => {
                    let normal = Normal::new(*mean, *std).expect("Operation failed");
                    (0..properties.n_samples)
                        .map(|_| rng.sample(normal))
                        .collect::<Vec<f64>>()
                }
                FeatureDistribution::Uniform { low, high } => {
                    let uniform = Uniform::new(*low, *high).expect("Operation failed");
                    (0..properties.n_samples)
                        .map(|_| rng.sample(uniform))
                        .collect::<Vec<f64>>()
                }
                FeatureDistribution::Beta { alpha, beta } => {
                    let beta_dist = Beta::new(*alpha, *beta)?;
                    (0..properties.n_samples)
                        .map(|_| beta_dist.sample(&mut rng))
                        .collect::<Vec<f64>>()
                }
                FeatureDistribution::Categorical { weights, .. } => {
                    let categorical = Categorical::new(weights.clone())?;
                    (0..properties.n_samples)
                        .map(|_| categorical.sample(&mut rng) as f64)
                        .collect::<Vec<f64>>()
                }
            };
            data.insert(name.clone(), feature_data);
        }

        Ok(SyntheticDataset { data, properties })
    }

    /// Generate a correlated multivariate dataset
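    ///
    /// # Example
    ///
    /// A sketch with two moderately correlated variables (not run as a doctest;
    /// numbers arbitrary):
    ///
    /// ```rust,ignore
    /// let correlation = vec![vec![1.0, 0.6], vec![0.6, 1.0]];
    /// let samples = StatsBridge::correlated_dataset(vec![0.0, 0.0], correlation, 500, 42)?;
    /// assert_eq!(samples.shape(), &[500, 2]);
    /// ```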
    pub fn correlated_dataset(
        means: Vec<f64>,
        correlation_matrix: Vec<Vec<f64>>,
        n_samples: usize,
        seed: u64,
    ) -> Result<Array2<f64>, String> {
        let mvn = MultivariateNormal::new(means, correlation_matrix)?;
        let mut rng = seeded_rng(seed);

        let mut samples = Array2::zeros((n_samples, mvn.dimension()));
        for i in 0..n_samples {
            let sample = mvn.sample(&mut rng);
            for j in 0..sample.len() {
                samples[[i, j]] = sample[j];
            }
        }

        Ok(samples)
    }

    /// Percentile bootstrap confidence interval for an arbitrary statistic
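    ///
    /// # Example
    ///
    /// A sketch estimating a 95% interval for the mean (not run as a doctest;
    /// data and resample count are arbitrary):
    ///
    /// ```rust,ignore
    /// let data = [2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0];
    /// let mean = |xs: &[f64]| xs.iter().sum::<f64>() / xs.len() as f64;
    /// let (estimate, lo, hi) =
    ///     StatsBridge::bootstrap_confidence_interval(&data, mean, 0.95, 2000, 42)?;
    /// assert!(lo <= estimate && estimate <= hi);
    /// ```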
    pub fn bootstrap_confidence_interval<F>(
        data: &[f64],
        statistic: F,
        confidence_level: f64,
        n_bootstrap: usize,
        seed: u64,
    ) -> Result<(f64, f64, f64), String>
    where
        F: Fn(&[f64]) -> f64 + Send + Sync,
    {
        use crate::random::slice_ops::ScientificSliceRandom;

        let bootstrap_stats: Vec<f64> = (0..n_bootstrap)
            .map(|i| {
                // Derive a distinct RNG per resample; reusing the same seed for
                // every iteration would make all bootstrap samples identical
                let mut rng = seeded_rng(seed.wrapping_add(i as u64));
                let bootstrap_sample =
                    data.scientific_sample_with_replacement(&mut rng, data.len());
                let sample_values: Vec<f64> = bootstrap_sample.iter().map(|&&x| x).collect();
                statistic(&sample_values)
            })
            .collect();

        let mut sorted_stats = bootstrap_stats;
        sorted_stats.sort_by(|a, b| a.partial_cmp(b).expect("Operation failed"));

        let alpha = 1.0 - confidence_level;
        let lower_idx = (alpha / 2.0 * sorted_stats.len() as f64) as usize;
        let upper_idx = ((1.0 - alpha / 2.0) * sorted_stats.len() as f64) as usize;

        let lower_bound = sorted_stats[lower_idx];
        let upper_bound = sorted_stats[upper_idx.min(sorted_stats.len() - 1)];
        let point_estimate = statistic(data);

        Ok((point_estimate, lower_bound, upper_bound))
    }
}

/// Bridge for neural network operations
pub struct NeuralBridge;

impl NeuralBridge {
    /// Xavier/Glorot weight initialization
    pub fn xavier_initialization(
        layer_sizes: &[usize],
        seed: u64,
    ) -> Result<Vec<Array2<f64>>, String> {
        let mut rng = seeded_rng(seed);
        let mut weights = Vec::new();

        for i in 0..layer_sizes.len() - 1 {
            let fan_in = layer_sizes[i] as f64;
            let fan_out = layer_sizes[i + 1] as f64;
            let std = (2.0 / (fan_in + fan_out)).sqrt();

            let normal = Normal::new(0.0, std).expect("Operation failed");
            let mut weight_matrix = Array2::zeros((layer_sizes[i + 1], layer_sizes[i]));

            for j in 0..layer_sizes[i + 1] {
                for k in 0..layer_sizes[i] {
                    weight_matrix[[j, k]] = rng.sample(normal);
                }
            }

            weights.push(weight_matrix);
        }

        Ok(weights)
    }

    /// He/Kaiming weight initialization for ReLU networks
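    ///
    /// # Example
    ///
    /// A sketch for a small ReLU network (not run as a doctest; sizes arbitrary).
    /// Each weight matrix is shaped `[fan_out, fan_in]` with entries drawn from a
    /// normal distribution with standard deviation `sqrt(2 / fan_in)`.
    ///
    /// ```rust,ignore
    /// let weights = NeuralBridge::he_initialization(&[32, 16, 4], 42)?;
    /// assert_eq!(weights.len(), 2);
    /// assert_eq!(weights[0].shape(), &[16, 32]);
    /// ```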
    pub fn he_initialization(layer_sizes: &[usize], seed: u64) -> Result<Vec<Array2<f64>>, String> {
        let mut rng = seeded_rng(seed);
        let mut weights = Vec::new();

        for i in 0..layer_sizes.len() - 1 {
            let fan_in = layer_sizes[i] as f64;
            let std = (2.0 / fan_in).sqrt();

            let normal = Normal::new(0.0, std).expect("Operation failed");
            let mut weight_matrix = Array2::zeros((layer_sizes[i + 1], layer_sizes[i]));

            for j in 0..layer_sizes[i + 1] {
                for k in 0..layer_sizes[i] {
                    weight_matrix[[j, k]] = rng.sample(normal);
                }
            }

            weights.push(weight_matrix);
        }

        Ok(weights)
    }

    /// LeCun weight initialization for SELU networks
    pub fn lecun_initialization(
        layer_sizes: &[usize],
        seed: u64,
    ) -> Result<Vec<Array2<f64>>, String> {
        let mut rng = seeded_rng(seed);
        let mut weights = Vec::new();

        for i in 0..layer_sizes.len() - 1 {
            let fan_in = layer_sizes[i] as f64;
            let std = (1.0 / fan_in).sqrt();

            let normal = Normal::new(0.0, std).expect("Operation failed");
            let mut weight_matrix = Array2::zeros((layer_sizes[i + 1], layer_sizes[i]));

            for j in 0..layer_sizes[i + 1] {
                for k in 0..layer_sizes[i] {
                    weight_matrix[[j, k]] = rng.sample(normal);
                }
            }

            weights.push(weight_matrix);
        }

        Ok(weights)
    }

    /// Generate random dropout masks
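    ///
    /// # Example
    ///
    /// A sketch of inverted-dropout masks (not run as a doctest; shape and rate
    /// arbitrary): kept entries equal `1 / keep_prob`, dropped entries are zero.
    ///
    /// ```rust,ignore
    /// let masks = NeuralBridge::dropout_masks(&[(8, 4)], 0.5, 42)?;
    /// assert!(masks[0].iter().all(|&m| m == 0.0 || m == 2.0));
    /// ```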
    pub fn dropout_masks(
        shapes: &[(usize, usize)],
        dropout_rate: f64,
        seed: u64,
    ) -> Result<Vec<Array2<f64>>, String> {
        if !(0.0..1.0).contains(&dropout_rate) {
            return Err("dropout_rate must be in [0, 1)".to_string());
        }

        let mut rng = seeded_rng(seed);
        let mut masks = Vec::new();

        let keep_prob = 1.0 - dropout_rate;
        let uniform = Uniform::new(0.0, 1.0).expect("Operation failed");

        for &(rows, cols) in shapes {
            let mut mask = Array2::zeros((rows, cols));
            for i in 0..rows {
                for j in 0..cols {
                    // Inverted dropout: surviving entries are scaled by 1/keep_prob
                    // so expected activations are unchanged at training time
                    mask[[i, j]] = if rng.sample(uniform) < keep_prob {
                        1.0 / keep_prob
                    } else {
                        0.0
                    };
                }
            }
            masks.push(mask);
        }

        Ok(masks)
    }

    /// Gradient noise injection for improved generalization
    pub fn gradient_noise_injection(
        noise_scale: f64,
        gradients: &[Array2<f64>],
        seed: u64,
    ) -> Result<Vec<Array2<f64>>, String> {
        let mut rng = seeded_rng(seed);
        let mut noisy_gradients = Vec::new();

        let normal = Normal::new(0.0, noise_scale).expect("Operation failed");

        for gradient in gradients {
            let mut noisy_gradient = gradient.clone();
            for elem in noisy_gradient.iter_mut() {
                *elem += rng.sample(normal);
            }
            noisy_gradients.push(noisy_gradient);
        }

        Ok(noisy_gradients)
    }

    /// Generate random augmentation parameters
    pub fn augmentation_parameters(
        batch_size: usize,
        config: AugmentationConfig,
        seed: u64,
    ) -> AugmentationBatch {
        let mut rng = seeded_rng(seed);
        let mut batch = AugmentationBatch::new(batch_size);

        for _ in 0..batch_size {
            let rotation = if config.rotation_range > 0.0 {
                rng.sample(
                    Uniform::new(-config.rotation_range, config.rotation_range)
                        .expect("Operation failed"),
                )
            } else {
                0.0
            };

            let scale = if config.scale_range.0 < config.scale_range.1 {
                rng.sample(
                    Uniform::new(config.scale_range.0, config.scale_range.1)
                        .expect("Operation failed"),
                )
            } else {
                1.0
            };

            let translation_x = if config.translation_range.0 > 0.0 {
                rng.sample(
                    Uniform::new(-config.translation_range.0, config.translation_range.0)
                        .expect("Operation failed"),
                )
            } else {
                0.0
            };

            let translation_y = if config.translation_range.1 > 0.0 {
                rng.sample(
                    Uniform::new(-config.translation_range.1, config.translation_range.1)
                        .expect("Operation failed"),
                )
            } else {
                0.0
            };

            batch.add_transform(AugmentationTransform {
                rotation,
                scale,
                translation: (translation_x, translation_y),
                horizontal_flip: rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed"))
                    < config.horizontal_flip_prob,
                vertical_flip: rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed"))
                    < config.vertical_flip_prob,
            });
        }

        batch
    }
}

/// Bridge for optimization algorithms requiring randomness
pub struct OptimizationBridge;

impl OptimizationBridge {
    /// Generate random initial population for genetic algorithms
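    ///
    /// # Example
    ///
    /// A sketch where each individual is a vector of genes in `[0, 1)` (not run
    /// as a doctest; sizes arbitrary):
    ///
    /// ```rust,ignore
    /// let population = OptimizationBridge::genetic_algorithm_population(
    ///     20,
    ///     |rng| {
    ///         (0..8)
    ///             .map(|_| rng.sample(Uniform::new(0.0, 1.0).expect("valid range")))
    ///             .collect::<Vec<f64>>()
    ///     },
    ///     42,
    /// );
    /// assert_eq!(population.len(), 20);
    /// ```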
    pub fn genetic_algorithm_population<T>(
        population_size: usize,
        individual_generator: impl Fn(&mut Random<rand::rngs::StdRng>) -> T,
        seed: u64,
    ) -> Vec<T> {
        let mut rng = seeded_rng(seed);
        (0..population_size)
            .map(|_| individual_generator(&mut rng))
            .collect()
    }

    /// Generate random perturbations for simulated annealing
    pub fn simulated_annealing_perturbation(
        current_state: &[f64],
        temperature: f64,
        perturbation_scale: f64,
        seed: u64,
    ) -> Vec<f64> {
        let mut rng = seeded_rng(seed);
        let std = perturbation_scale * temperature.sqrt();
        let normal = Normal::new(0.0, std).expect("Operation failed");

        current_state
            .iter()
            .map(|&x| x + rng.sample(normal))
            .collect()
    }

    /// Generate random directions for coordinate descent
    pub fn random_coordinate_directions(
        dimensions: usize,
        n_directions: usize,
        seed: u64,
    ) -> Array2<f64> {
        let mut rng = seeded_rng(seed);
        let mut directions = Array2::zeros((n_directions, dimensions));

        for i in 0..n_directions {
            // Generate random unit vector; normalizing an i.i.d. Gaussian vector
            // yields a direction uniform on the unit sphere
            let mut direction = vec![0.0; dimensions];
            for j in 0..dimensions {
                direction[j] = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
            }

            // Normalize (a Gaussian vector is nonzero with probability 1)
            let norm = direction.iter().map(|x| x * x).sum::<f64>().sqrt();
            for j in 0..dimensions {
                directions[[i, j]] = direction[j] / norm;
            }
        }

        directions
    }

    /// Generate noise for parameter exploration in reinforcement learning
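    ///
    /// # Example
    ///
    /// A sketch of Gaussian exploration noise (not run as a doctest; values
    /// arbitrary). Seeding with `seed + time_step` gives a fresh draw per step.
    ///
    /// ```rust,ignore
    /// let noise = OptimizationBridge::exploration_noise(
    ///     4,
    ///     ExplorationNoiseType::Gaussian { std: 0.2 },
    ///     0,
    ///     42,
    /// );
    /// assert_eq!(noise.len(), 4);
    /// ```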
    pub fn exploration_noise(
        action_dimensions: usize,
        noise_type: ExplorationNoiseType,
        time_step: usize,
        seed: u64,
    ) -> Vec<f64> {
        // wrapping_add avoids an overflow panic in debug builds for large seeds
        let mut rng = seeded_rng(seed.wrapping_add(time_step as u64));

        match noise_type {
            ExplorationNoiseType::Gaussian { std } => {
                let normal = Normal::new(0.0, std).expect("Operation failed");
                (0..action_dimensions).map(|_| rng.sample(normal)).collect()
            }
            ExplorationNoiseType::OrnsteinUhlenbeck { theta, sigma, mu } => {
                // Simplified OU process (would need state for full implementation)
                let dt = 1.0;
                let std = sigma * (2.0 * theta * dt).sqrt();
                let normal = Normal::new(0.0, std).expect("Operation failed");
                (0..action_dimensions)
                    .map(|_| mu + rng.sample(normal))
                    .collect()
            }
            ExplorationNoiseType::EpsilonGreedy { epsilon } => {
                let uniform = Uniform::new(0.0, 1.0).expect("Operation failed");
                (0..action_dimensions)
                    .map(|_| {
                        if rng.sample(uniform) < epsilon {
                            1.0
                        } else {
                            0.0
                        }
                    })
                    .collect()
            }
        }
    }
}

// Supporting types and builders

#[derive(Debug, Clone)]
pub struct ExperimentDesignBuilder {
    factors: Vec<Vec<f64>>,
    replications: usize,
    blocking_factors: Vec<String>,
    randomization_seed: Option<u64>,
    design_type: DesignType,
}

#[derive(Debug, Clone)]
pub enum DesignType {
    FullFactorial,
    FractionalFactorial { fraction: f64 },
    CentralComposite { alpha: f64 },
    LatinHypercube,
    RandomSampling { n_points: usize },
}

impl ExperimentDesignBuilder {
    pub fn new() -> Self {
        Self {
            factors: Vec::new(),
            replications: 1,
            blocking_factors: Vec::new(),
            randomization_seed: None,
            design_type: DesignType::FullFactorial,
        }
    }

    pub fn factors(mut self, factors: &[Vec<f64>]) -> Self {
        self.factors = factors.to_vec();
        self
    }

    pub fn replications(mut self, n: usize) -> Self {
        self.replications = n;
        self
    }

    pub fn randomization_seed(mut self, seed: u64) -> Self {
        self.randomization_seed = Some(seed);
        self
    }

    pub fn design_type(mut self, design: DesignType) -> Self {
        self.design_type = design;
        self
    }

    pub fn build(self) -> Result<ExperimentalDesign, String> {
        let seed = self.randomization_seed.unwrap_or(42);

        let design_points = match self.design_type {
            DesignType::FullFactorial => {
                crate::random::scientific::ExperimentalDesign::factorial_design(&self.factors)
            }
            DesignType::FractionalFactorial { fraction } => {
                crate::random::scientific::ExperimentalDesign::fractional_factorial_design(
                    &self.factors,
                    fraction,
                    seed,
                )
            }
            DesignType::CentralComposite { alpha } => {
                crate::random::scientific::ExperimentalDesign::central_composite_design(
                    self.factors.len(),
                    alpha,
                )
            }
            DesignType::LatinHypercube => {
                // Would integrate with QMC module
                return Err("Latin Hypercube design not yet implemented".to_string());
            }
            DesignType::RandomSampling { n_points } => {
                let mut rng = seeded_rng(seed);
                let mut points = Vec::new();
                for _ in 0..n_points {
                    let mut point = Vec::new();
                    for factor in &self.factors {
                        let idx =
                            rng.sample(Uniform::new(0, factor.len()).expect("Operation failed"));
                        point.push(factor[idx]);
                    }
                    points.push(point);
                }
                points
            }
        };

        Ok(ExperimentalDesign {
            design_points,
            replications: self.replications,
            factor_names: (0..self.factors.len())
                .map(|i| format!("Factor_{}", i))
                .collect(),
        })
    }
}

#[derive(Debug, Clone)]
pub struct ExperimentalDesign {
    pub design_points: Vec<Vec<f64>>,
    pub replications: usize,
    pub factor_names: Vec<String>,
}

#[derive(Debug, Clone)]
pub struct DatasetProperties {
    pub n_samples: usize,
    pub features: HashMap<String, FeatureSpec>,
}

#[derive(Debug, Clone)]
pub struct FeatureSpec {
    pub distribution: FeatureDistribution,
    pub correlation_target: Option<String>,
    pub correlation_strength: f64,
}

#[derive(Debug, Clone)]
pub enum FeatureDistribution {
    Normal {
        mean: f64,
        std: f64,
    },
    Uniform {
        low: f64,
        high: f64,
    },
    Beta {
        alpha: f64,
        beta: f64,
    },
    Categorical {
        categories: Vec<usize>,
        weights: Vec<f64>,
    },
}

#[derive(Debug, Clone)]
pub struct SyntheticDataset {
    pub data: HashMap<String, Vec<f64>>,
    pub properties: DatasetProperties,
}

#[derive(Debug, Clone)]
pub struct AugmentationConfig {
    pub rotation_range: f64,
    pub scale_range: (f64, f64),
    pub translation_range: (f64, f64),
    pub horizontal_flip_prob: f64,
    pub vertical_flip_prob: f64,
}

#[derive(Debug, Clone)]
pub struct AugmentationTransform {
    pub rotation: f64,
    pub scale: f64,
    pub translation: (f64, f64),
    pub horizontal_flip: bool,
    pub vertical_flip: bool,
}

#[derive(Debug)]
pub struct AugmentationBatch {
    pub transforms: Vec<AugmentationTransform>,
}

impl AugmentationBatch {
    pub fn new(capacity: usize) -> Self {
        Self {
            transforms: Vec::with_capacity(capacity),
        }
    }

    pub fn add_transform(&mut self, transform: AugmentationTransform) {
        self.transforms.push(transform);
    }
}

#[derive(Debug, Clone)]
pub enum ExplorationNoiseType {
    Gaussian { std: f64 },
    OrnsteinUhlenbeck { theta: f64, sigma: f64, mu: f64 },
    EpsilonGreedy { epsilon: f64 },
}

#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_relative_eq;

    #[test]
    fn test_linalg_bridge_symmetric_matrix() {
        let matrix = LinalgBridge::random_symmetric_matrix(5, 42).expect("Operation failed");

        // Check symmetry
        for i in 0..5 {
            for j in 0..5 {
                assert_relative_eq!(matrix[[i, j]], matrix[[j, i]], epsilon = 1e-10);
            }
        }
    }

    #[test]
    fn test_linalg_bridge_positive_definite() {
        let matrix =
            LinalgBridge::random_symmetric_positive_definite(3, 42).expect("Operation failed");

        // Check that all diagonal elements are positive
        for i in 0..3 {
            assert!(matrix[[i, i]] > 0.0);
        }

        // Check symmetry
        for i in 0..3 {
            for j in 0..3 {
                assert_relative_eq!(matrix[[i, j]], matrix[[j, i]], epsilon = 1e-10);
            }
        }
    }

    #[test]
    fn test_neural_bridge_xavier_init() {
        let layer_sizes = vec![784, 128, 64, 10];
        let weights =
            NeuralBridge::xavier_initialization(&layer_sizes, 42).expect("Operation failed");

        assert_eq!(weights.len(), 3); // 3 weight matrices
        assert_eq!(weights[0].shape(), [128, 784]);
        assert_eq!(weights[1].shape(), [64, 128]);
        assert_eq!(weights[2].shape(), [10, 64]);
    }

    #[test]
    #[ignore] // Flaky statistical test - bootstrap confidence intervals can be sensitive
    fn test_stats_bridge_bootstrap_ci() {
        let data = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0];

        let (point_est, lower, upper) = StatsBridge::bootstrap_confidence_interval(
            &data,
            |samples| samples.iter().sum::<f64>() / samples.len() as f64, // Mean
            0.95,
            1000,
            42,
        )
        .expect("Test: operation failed");

        assert_relative_eq!(point_est, 5.5, epsilon = 0.1);
        assert!(lower < point_est);
        assert!(upper > point_est);
    }

    #[test]
    fn test_optimization_bridge_genetic_population() {
        let population = OptimizationBridge::genetic_algorithm_population(
            10,
            |rng| {
                (0..5)
                    .map(|_| rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed")))
                    .collect::<Vec<f64>>()
            },
            42,
        );

        assert_eq!(population.len(), 10);
        for individual in &population {
            assert_eq!(individual.len(), 5);
            for &gene in individual {
                assert!((0.0..=1.0).contains(&gene));
            }
        }
    }

    #[test]
    fn test_experiment_design_builder() {
        let design = StatsBridge::design_experiment()
            .factors(&[vec![1.0, 2.0], vec![0.1, 0.2]])
            .replications(3)
            .randomization_seed(42)
            .build()
            .expect("Test: operation failed");

        assert_eq!(design.design_points.len(), 4); // 2x2 factorial
        assert_eq!(design.replications, 3);
        assert_eq!(design.factor_names.len(), 2);
    }
}
933}