// scirs2_core/random/mod.rs
1//! Ultra-advanced random number generation for SCIRS2 ecosystem
2//!
3//! This module provides the most comprehensive and cutting-edge random number generation
4//! capabilities available, designed specifically for scientific computing, machine learning,
5//! and quantum-inspired algorithms with unparalleled features for reproducibility,
6//! performance, and specialized ultra-modern distributions.
7//!
8//! ## Quick Start
9//!
10//! ```rust
11//! // For quick prototyping - use the quick module
12//! use scirs2_core::random::quick::*;
13//! let x = random_f64();
14//! let data = random_vector(100); // Use smaller data for doc tests
15//!
16//! // For scientific computing - use the prelude
17//! use scirs2_core::random::prelude::*;
18//! let mut rng = thread_rng();
19//! let sample = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
20//! ```
21//!
22//! ## Module Organization
23//!
24//! ### 🚀 **Cutting-Edge Modules**
25//! - [`cutting_edge_mcmc`] - HMC, NUTS, SVGD, and advanced MCMC methods
26//! - [`neural_sampling`] - Normalizing flows, VAE, diffusion models
27//! - [`quantum_inspired`] - Quantum algorithms for classical computation
28//! - [`advanced_numerical`] - Multi-level Monte Carlo, adaptive sampling
29//! - [`ecosystem_integration`] - Seamless SCIRS2 module interoperability
30//!
31//! ### 🎯 **Workflow-Based Modules**
32//! - [`prelude`] - Most commonly used items (Rust idiom)
33//! - [`quick`] - Rapid prototyping with minimal setup
34//! - [`scientific`] - Research and scientific computing workflows
35//! - [`ml`] - Machine learning specific utilities
36//!
//! ### ⚡ **Core Implementation Modules**
38//! - [`core`] - Core Random struct and fundamental operations
39//! - [`distributions`] - Advanced statistical distributions
40//! - [`arrays`] - Optimized bulk array generation
41//! - [`slice_ops`] - Enhanced slice operations and sampling
42//!
43//! ### 🔬 **Specialized Modules**
44//! - [`qmc`] - Quasi-Monte Carlo sequences (Sobol, Halton, LHS)
45//! - [`variance_reduction`] - Monte Carlo variance reduction techniques
46//! - [`secure`] - Cryptographically secure random generation
47//! - [`parallel`] - Thread-safe parallel random generation
48
49// Core random functionality
50pub mod core;
51pub mod seq;
52pub mod slice_ops;
53
54// Advanced distributions and sampling
55pub mod arrays;
56pub mod distributions;
57pub mod distributions_unified;
58
59// Monte Carlo and variance reduction
60pub mod qmc;
61pub mod variance_reduction;
62
63// Security and parallel computing
64pub mod parallel;
65pub mod secure;
66
67// Enhanced workflow-based modules
68pub mod ml;
69pub mod prelude;
70pub mod quick;
71pub mod scientific;
72
73// Cutting-edge modules
74pub mod advanced_numerical;
75pub mod cutting_edge_mcmc;
76pub mod ecosystem_integration;
77pub mod neural_sampling;
78pub mod quantum_inspired;
79
80// Re-export core functionality (except Random which we redefine for compatibility)
81// Use self::core to avoid ambiguity with Rust's standard library ::core (critical for Windows builds)
82pub use self::core::{seeded_rng, thread_rng, DistributionExt};
83
84// Re-export RNG types for SCIRS2 POLICY compliance
85pub use rand_chacha::{ChaCha12Rng, ChaCha20Rng, ChaCha8Rng};
86
87// Re-export the core Random as CoreRandom for internal use
88pub use self::core::Random as CoreRandom;
89
90// Re-export enhanced slice operations
91pub use slice_ops::{ScientificSliceRandom, SliceRandomExt};
92
93// Note: seq module is available as scirs2_core::random::seq
94
95// Re-export slice convenience functions under different name to avoid conflict
96// pub use slice_ops::convenience as slice_convenience;
97
98// Re-export specialized distributions
99pub use distributions::{
100    Beta, Categorical, Dirichlet, GammaDist, MultivariateNormal, VonMises, WeightedChoice,
101};
102
103// Re-export unified distribution interface for ecosystem compatibility
104pub use distributions_unified::{
105    UnifiedBeta, UnifiedBinomial, UnifiedCauchy, UnifiedChiSquared, UnifiedDirichlet,
106    UnifiedDistribution, UnifiedDistributionError, UnifiedExp, UnifiedFisherF, UnifiedGamma,
107    UnifiedLogNormal, UnifiedNormal, UnifiedPoisson, UnifiedStudentT, UnifiedWeibull,
108};
109
110// Re-export optimized array operations
111pub use arrays::{
112    random_exponential_array, random_gamma_array, random_he_weights, random_normal_array,
113    random_sparse_array, random_uniform_array, random_xavier_weights, OptimizedArrayRandom,
114};
115
116// Re-export variance reduction techniques
117pub use variance_reduction::{
118    AntitheticSampling, CommonRatio, ControlVariate, ImportanceSplitting,
119};
120
121// Re-export QMC sequences
122pub use qmc::{
123    HaltonGenerator, LatinHypercubeSampler, LowDiscrepancySequence, QmcError, SobolGenerator,
124};
125
126// Re-export secure random generation
127pub use secure::{utils as secure_utils, SecureRandom, SecureRngPool};
128
129// Re-export parallel operations
130pub use parallel::{BatchRng, DistributedRngPool, ParallelRng, ThreadLocalRngPool};
131
132// Re-export cutting-edge algorithms
133pub use advanced_numerical::{
134    AdaptiveResult, AdaptiveSampler, ImportanceResult, ImportanceSampler, MLMCResult,
135    MultiLevelMonteCarlo, SequentialMonteCarlo,
136};
137
138pub use cutting_edge_mcmc::{
139    EllipticalSliceSampler, HamiltonianMonteCarlo, NoUTurnSampler, ParallelTempering,
140    SteinVariationalGradientDescent,
141};
142
143pub use neural_sampling::{
144    DiffusionConfig, EnergyBasedModel, NeuralPosteriorEstimation, NormalizingFlow,
145    ScoreBasedDiffusion,
146};
147
148pub use quantum_inspired::{
149    CoinParameters, QuantumAmplitudeAmplification, QuantumInspiredAnnealing,
150    QuantumInspiredEvolutionary, QuantumWalk,
151};
152
153pub use ecosystem_integration::{
154    AugmentationConfig, ExperimentalDesign, LinalgBridge, NeuralBridge, OptimizationBridge,
155    StatsBridge, SyntheticDataset,
156};
157
158// Re-export external dependencies for convenience
159pub use ::ndarray::Dimension;
160pub use rand::prelude as rand_prelude;
161pub use rand::rngs;
162pub use rand::seq::SliceRandom;
163pub use rand::{Rng, RngCore, SeedableRng};
164pub use rand_distr as rand_distributions;
165pub use rand_distr::uniform;
166
167/// Convenience function to generate a random value of the inferred type
168///
169/// This function generates a random value using the thread-local RNG.
170/// The type is inferred from context, or can be specified explicitly.
171///
172/// # Examples
173///
174/// ```
175/// use scirs2_core::random::random;
176///
177/// // Generate random f64
178/// let x: f64 = random();
179/// assert!(x >= 0.0 && x < 1.0);
180///
181/// // Generate random bool
182/// let b: bool = random();
183///
184/// // Explicit type annotation
185/// let y = random::<f32>();
186/// ```
187pub fn random<T>() -> T
188where
189    rand::distr::StandardUniform: rand::distr::Distribution<T>,
190{
191    rand::random()
192}
193
194/// Convenience function to create a thread-local RNG
195///
196/// This is equivalent to `thread_rng()` but provides a shorter name
197/// for compatibility with code that uses `rng()`.
198///
199/// # Examples
200///
201/// ```
202/// use scirs2_core::random::rng;
203/// use scirs2_core::random::Rng;
204///
205/// let mut rng = rng();
206/// let x: f64 = rng.random();
207/// ```
208pub fn rng() -> rand::rngs::ThreadRng {
209    rand::rng()
210}
211
212// Comprehensive re-export of ALL rand_distr distributions for SciRS2 ecosystem compatibility
213// This ensures other projects can access any distribution through scirs2-core
214pub use rand_distr::{
215    // Other distributions
216    Alphanumeric,
217    // Discrete distributions
218    Bernoulli as RandBernoulli,
219    // Continuous distributions
220    Beta as RandBeta,
221    Binomial,
222    Cauchy,
223    ChiSquared,
224    // Multivariate distributions
225    Dirichlet as RandDirichlet,
226    // Distribution trait
227    Distribution,
228    Exp,
229    FisherF,
230    Gamma as RandGamma,
231    Geometric,
232    Hypergeometric,
233    InverseGaussian,
234    LogNormal,
235    Normal as RandNormal,
236    Open01,
237    OpenClosed01,
238    Pareto,
239    Pert,
240    Poisson,
241    StandardNormal,
242    StudentT,
243    Triangular,
244    Uniform as RandUniform,
245    UnitBall,
246    UnitCircle,
247    UnitDisc,
248    UnitSphere,
249    Weibull,
250    Zeta,
251    Zipf,
252};
253
254// Re-export WeightedIndex from weighted submodule
255pub use rand_distr::weighted::WeightedIndex;
256
257// Clean, unprefixed type aliases for common distributions (for easier use)
258// These allow `use scirs2_core::random::Normal;` instead of `use scirs2_core::random::RandNormal;`
259pub use rand_distr::Bernoulli;
260pub use rand_distr::Exp as Exponential; // Exponential is just Exp in rand_distr
261pub use rand_distr::Gamma;
262pub use rand_distr::Normal;
263pub use rand_distr::Uniform;
264
265// Re-export ndarray-rand RandomExt trait if available
266#[cfg(feature = "random")]
267pub use ndarray_rand::RandomExt;
268
// Compatibility layer for systems without random feature
#[cfg(not(feature = "random"))]
pub trait RandomExt<T, D> {
    /// Build an array of the given `shape` by drawing each element from
    /// `distribution` with the supplied RNG.
    ///
    /// Mirrors the `ndarray_rand::RandomExt::random_using` signature so
    /// callers compile identically with or without the `random` feature.
    fn random_using<R: rand::Rng>(
        shape: D,
        distribution: impl rand_distr::Distribution<T>,
        rng: &mut R,
    ) -> Self;
}
278
279#[cfg(not(feature = "random"))]
280impl<T, D> RandomExt<T, D> for crate::ndarray::ArrayBase<crate::ndarray::OwnedRepr<T>, D>
281where
282    D: crate::ndarray::Dimension,
283{
284    fn random_using<R: rand::Rng>(
285        shape: D,
286        distribution: impl rand_distr::Distribution<T>,
287        rng: &mut R,
288    ) -> Self {
289        let size = shape.size();
290        let mut data = Vec::with_capacity(size);
291        for _ in 0..size {
292            data.push(distribution.sample(rng));
293        }
294        Self::from_shape_vec(shape, data).expect("Operation failed")
295    }
296}
297
298/// Legacy compatibility functions for backward compatibility
299pub mod legacy {
300    use super::*;
301    use rand_distr::Uniform;
302
303    /// Compatibility wrapper for updated rand API
304    pub fn rng() -> Random<rand::rngs::ThreadRng> {
305        Random { rng: rand::rng() }
306    }
307
308    /// Generate a random f64 value between 0.0 and 1.0
309    pub fn f64() -> f64 {
310        rand::random::<f64>()
311    }
312
313    /// Generate a random f32 value between 0.0 and 1.0
314    pub fn f32() -> f32 {
315        rand::random::<f32>()
316    }
317
318    /// Generate a random usize value in the given range
319    pub fn usize(range: std::ops::Range<usize>) -> usize {
320        rand::rng().random_range(range)
321    }
322}
323
324/// High-level convenience functions for common operations
325pub mod convenience {
326    use super::*;
327    use ::ndarray::{Array, Dimension, IxDyn};
328    use rand_distr::{Distribution, Normal, Uniform};
329
330    /// Generate a uniform random number in [0, 1)
331    pub fn uniform() -> f64 {
332        let mut rng = thread_rng();
333        rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed"))
334    }
335
336    /// Generate a standard normal random number (mean=0, std=1)
337    pub fn normal() -> f64 {
338        let mut rng = thread_rng();
339        rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"))
340    }
341
342    /// Generate a random integer in the given range
343    pub fn integer(min: i64, max: i64) -> i64 {
344        let mut rng = thread_rng();
345        rng.sample(Uniform::new_inclusive(min, max).expect("Operation failed"))
346    }
347
348    /// Generate a random boolean
349    pub fn boolean() -> bool {
350        let mut rng = thread_rng();
351        rng.random_bool(0.5)
352    }
353
354    /// Generate a random array with uniform distribution
355    pub fn uniform_array<Sh: Into<IxDyn>>(shape: Sh) -> Array<f64, IxDyn> {
356        let mut rng = thread_rng();
357        let shape = shape.into();
358        let size = shape.size();
359        let values: Vec<f64> = (0..size)
360            .map(|_| rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed")))
361            .collect();
362        Array::from_shape_vec(shape, values).expect("Operation failed")
363    }
364
365    /// Generate a random array with normal distribution
366    pub fn normal_array<Sh: Into<IxDyn>>(shape: Sh, mean: f64, std: f64) -> Array<f64, IxDyn> {
367        let mut rng = thread_rng();
368        let shape = shape.into();
369        let size = shape.size();
370        let values: Vec<f64> = (0..size)
371            .map(|_| rng.sample(Normal::new(mean, std).expect("Operation failed")))
372            .collect();
373        Array::from_shape_vec(shape, values).expect("Operation failed")
374    }
375}
376
377/// Sampling utilities for common statistical operations
378pub mod sampling {
379    use super::*;
380    use ::ndarray::{Array, Dimension, IxDyn};
381    use rand_distr::{Distribution, Exp, LogNormal, Normal, Uniform};
382
383    /// Sample uniformly from [0, 1)
384    pub fn random_uniform01<R: rand::Rng>(rng: &mut Random<R>) -> f64 {
385        rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed"))
386    }
387
388    /// Sample from a standard normal distribution (mean 0, std dev 1)
389    pub fn random_standard_normal<R: rand::Rng>(rng: &mut Random<R>) -> f64 {
390        rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"))
391    }
392
393    /// Sample from a normal distribution with given mean and standard deviation
394    pub fn random_normal<R: rand::Rng>(rng: &mut Random<R>, mean: f64, stddev: f64) -> f64 {
395        rng.sample(Normal::new(mean, stddev).expect("Operation failed"))
396    }
397
398    /// Sample from a log-normal distribution
399    pub fn random_lognormal<R: rand::Rng>(rng: &mut Random<R>, mean: f64, stddev: f64) -> f64 {
400        rng.sample(LogNormal::new(mean, stddev).expect("Operation failed"))
401    }
402
403    /// Sample from an exponential distribution
404    pub fn random_exponential<R: rand::Rng>(rng: &mut Random<R>, lambda: f64) -> f64 {
405        rng.sample(Exp::new(lambda).expect("Operation failed"))
406    }
407
408    /// Generate an array of random integers in a range
409    pub fn random_integers<R: rand::Rng, Sh>(
410        rng: &mut Random<R>,
411        min: i64,
412        max: i64,
413        shape: Sh,
414    ) -> Array<i64, IxDyn>
415    where
416        Sh: Into<IxDyn>,
417    {
418        rng.sample_array(
419            Uniform::new_inclusive(min, max).expect("Operation failed"),
420            shape,
421        )
422    }
423
424    /// Generate an array of random floating-point values in a range
425    pub fn random_floats<R: rand::Rng, Sh>(
426        rng: &mut Random<R>,
427        min: f64,
428        max: f64,
429        shape: Sh,
430    ) -> Array<f64, IxDyn>
431    where
432        Sh: Into<IxDyn>,
433    {
434        rng.sample_array(Uniform::new(min, max).expect("Operation failed"), shape)
435    }
436
437    /// Sample indices for bootstrapping (sampling with replacement)
438    pub fn bootstrap_indices<R: rand::Rng>(
439        rng: &mut Random<R>,
440        data_size: usize,
441        sample_size: usize,
442    ) -> Vec<usize> {
443        let dist = Uniform::new(0, data_size).expect("Operation failed");
444        rng.sample_vec(dist, sample_size)
445    }
446
447    /// Sample indices without replacement (for random subsampling)
448    pub fn sample_without_replacement<R: rand::Rng>(
449        rng: &mut Random<R>,
450        data_size: usize,
451        sample_size: usize,
452    ) -> Vec<usize> {
453        use rand::seq::SliceRandom;
454        let mut indices: Vec<usize> = (0..data_size).collect();
455        indices.shuffle(&mut rng.rng);
456        indices.truncate(sample_size);
457        indices
458    }
459}
460
461/// Importance sampling methods for efficient estimation
462pub mod importance_sampling {
463    use super::*;
464    use rand_distr::{Normal, Uniform};
465
466    /// Importance sampling estimator
467    #[derive(Debug)]
468    pub struct ImportanceSampler<R: rand::Rng> {
469        rng: Random<R>,
470    }
471
472    impl<R: rand::Rng> ImportanceSampler<R> {
473        /// Create a new importance sampler
474        pub fn new(rng: Random<R>) -> Self {
475            Self { rng }
476        }
477
478        /// Perform importance sampling with a given proposal distribution
479        pub fn sample_with_weights<F, G>(
480            &mut self,
481            target_pdf: F,
482            proposal_pdf: G,
483            proposal_sampler: impl Fn(&mut Random<R>) -> f64,
484            n_samples: usize,
485        ) -> (Vec<f64>, Vec<f64>)
486        where
487            F: Fn(f64) -> f64,
488            G: Fn(f64) -> f64,
489        {
490            let mut samples = Vec::with_capacity(n_samples);
491            let mut weights = Vec::with_capacity(n_samples);
492
493            for _ in 0..n_samples {
494                let sample = proposal_sampler(&mut self.rng);
495                let weight = target_pdf(sample) / proposal_pdf(sample);
496
497                samples.push(sample);
498                weights.push(weight);
499            }
500
501            (samples, weights)
502        }
503
504        /// Estimate expectation using importance sampling
505        pub fn estimate_expectation<F, G, H>(
506            &mut self,
507            function: F,
508            target_pdf: G,
509            proposal_pdf: H,
510            proposal_sampler: impl Fn(&mut Random<R>) -> f64,
511            n_samples: usize,
512        ) -> f64
513        where
514            F: Fn(f64) -> f64,
515            G: Fn(f64) -> f64,
516            H: Fn(f64) -> f64,
517        {
518            let (samples, weights) =
519                self.sample_with_weights(target_pdf, proposal_pdf, proposal_sampler, n_samples);
520
521            let weighted_sum: f64 = samples
522                .iter()
523                .zip(weights.iter())
524                .map(|(&x, &w)| function(x) * w)
525                .sum();
526
527            let weight_sum: f64 = weights.iter().sum();
528
529            weighted_sum / weight_sum
530        }
531
532        /// Adaptive importance sampling with mixture proposal
533        pub fn adaptive_sampling<F>(
534            &mut self,
535            target_log_pdf: F,
536            initial_samples: usize,
537            adaptation_rounds: usize,
538        ) -> Vec<f64>
539        where
540            F: Fn(f64) -> f64,
541        {
542            let mut samples = Vec::new();
543            let mut proposal_mean: f64 = 0.0;
544            let mut proposal_std: f64 = 1.0;
545
546            for round in 0..adaptation_rounds {
547                let round_samples = if round == 0 {
548                    initial_samples
549                } else {
550                    initial_samples / 2
551                };
552                let normal_dist =
553                    Normal::new(proposal_mean, proposal_std).expect("Operation failed");
554
555                let mut round_sample_vec = Vec::new();
556                let mut weights = Vec::new();
557
558                for _ in 0..round_samples {
559                    let sample = self.rng.sample(normal_dist);
560
561                    // Manual calculation of log PDF for normal distribution
562                    let normal_log_pdf = -0.5 * ((sample - proposal_mean) / proposal_std).powi(2)
563                        - 0.5 * (2.0 * std::f64::consts::PI).ln()
564                        - proposal_std.ln();
565                    let log_weight = target_log_pdf(sample) - normal_log_pdf;
566
567                    round_sample_vec.push(sample);
568                    weights.push(log_weight.exp());
569                }
570
571                // Update proposal parameters based on weighted samples
572                let weight_sum: f64 = weights.iter().sum();
573                if weight_sum > 0.0 {
574                    let normalized_weights: Vec<f64> =
575                        weights.iter().map(|w| w / weight_sum).collect();
576
577                    proposal_mean = round_sample_vec
578                        .iter()
579                        .zip(normalized_weights.iter())
580                        .map(|(&x, &w)| x * w)
581                        .sum();
582
583                    let variance = round_sample_vec
584                        .iter()
585                        .zip(normalized_weights.iter())
586                        .map(|(&x, &w)| w * (x - proposal_mean).powi(2))
587                        .sum::<f64>();
588
589                    proposal_std = variance.sqrt().max(0.1); // Prevent collapse
590                }
591
592                samples.extend(round_sample_vec);
593            }
594
595            samples
596        }
597    }
598
599    impl ImportanceSampler<rand::rngs::ThreadRng> {
600        /// Create importance sampler with default RNG
601        pub fn with_default_rng() -> Self {
602            Self::new(Random::default())
603        }
604    }
605}
606
/// GPU-accelerated random number generation (when available)
#[cfg(feature = "gpu")]
pub mod gpu {
    /// Placeholder handle for a future GPU-backed RNG implementation.
    // GPU acceleration implementation would go here.
    pub struct GpuRng;

    impl GpuRng {
        /// Create a new (currently no-op) GPU RNG handle.
        pub fn new() -> Self {
            Self
        }
    }

    impl Default for GpuRng {
        fn default() -> Self {
            Self::new()
        }
    }
}
626
/// Legacy Random struct wrapper for backward compatibility
/// This provides the same interface as the original Random struct
/// while delegating to the new modular implementation
#[derive(Debug)]
pub struct Random<R: rand::Rng + ?Sized = rand::rngs::ThreadRng> {
    // Underlying generator; crate-visible so sibling modules can reach it
    // directly (e.g. `sampling::sample_without_replacement` shuffles via
    // `rng.rng`).
    pub(crate) rng: R,
}
634
635impl Default for Random<rand::rngs::ThreadRng> {
636    fn default() -> Self {
637        Self { rng: rand::rng() }
638    }
639}
640
641impl<R: rand::Rng + Clone> Clone for Random<R> {
642    fn clone(&self) -> Self {
643        Self {
644            rng: self.rng.clone(),
645        }
646    }
647}
648
649impl<R: rand::Rng> Random<R> {
650    /// Sample a value from a distribution
651    pub fn sample<D, T>(&mut self, distribution: D) -> T
652    where
653        D: rand_distr::Distribution<T>,
654    {
655        use rand_distr::Distribution;
656        distribution.sample(&mut self.rng)
657    }
658
659    /// Generate a random value between two bounds (inclusive min, exclusive max)
660    pub fn random_range_bounds<T: rand_distr::uniform::SampleUniform + PartialOrd + Copy>(
661        &mut self,
662        min: T,
663        max: T,
664    ) -> T {
665        self.sample(rand_distr::Uniform::new(min, max).expect("Operation failed"))
666    }
667
668    /// Generate a random value within the given range (using range syntax)
669    pub fn gen_range<T, RNG>(&mut self, range: RNG) -> T
670    where
671        T: rand_distr::uniform::SampleUniform,
672        RNG: rand_distr::uniform::SampleRange<T>,
673    {
674        rand::Rng::random_range(&mut self.rng, range)
675    }
676
677    /// Generate a random value within the given range (rand-compatible range syntax)
678    pub fn random_range<T, RNG>(&mut self, range: RNG) -> T
679    where
680        T: rand_distr::uniform::SampleUniform,
681        RNG: rand_distr::uniform::SampleRange<T>,
682    {
683        rand::Rng::random_range(&mut self.rng, range)
684    }
685
686    /// Generate a random f64 value between 0.0 and 1.0
687    pub fn random_f64(&mut self) -> f64 {
688        self.sample(rand_distr::Uniform::new(0.0, 1.0).expect("Operation failed"))
689    }
690
691    /// Generate a random f64 value using the underlying RNG (convenience method)
692    pub fn random_f64_raw(&mut self) -> f64 {
693        rand::Rng::random(&mut self.rng)
694    }
695
696    /// Generate a random boolean value
697    pub fn random_bool(&mut self) -> bool {
698        use rand_distr::Distribution;
699        let dist = rand_distr::Bernoulli::new(0.5).expect("Operation failed");
700        dist.sample(&mut self.rng)
701    }
702
703    /// Generate a random boolean with the given probability of being true
704    pub fn random_bool_with_chance(&mut self, prob: f64) -> bool {
705        use rand_distr::Distribution;
706        let dist = rand_distr::Bernoulli::new(prob).expect("Operation failed");
707        dist.sample(&mut self.rng)
708    }
709
710    /// Shuffle a slice randomly
711    pub fn shuffle<T>(&mut self, slice: &mut [T]) {
712        use rand::seq::SliceRandom;
713        slice.shuffle(&mut self.rng);
714    }
715
716    /// Generate a vector of values sampled from a distribution
717    pub fn sample_vec<D, T>(&mut self, distribution: D, size: usize) -> Vec<T>
718    where
719        D: rand_distr::Distribution<T> + Copy,
720    {
721        (0..size)
722            .map(|_| distribution.sample(&mut self.rng))
723            .collect()
724    }
725
726    /// Generate an crate::ndarray::Array from samples of a distribution
727    pub fn sample_array<D, T, Sh>(
728        &mut self,
729        distribution: D,
730        shape: Sh,
731    ) -> crate::ndarray::Array<T, crate::ndarray::IxDyn>
732    where
733        D: rand_distr::Distribution<T> + Copy,
734        Sh: Into<crate::ndarray::IxDyn>,
735    {
736        let shape = shape.into();
737        let size = shape.size();
738        let values = self.sample_vec(distribution, size);
739        crate::ndarray::Array::from_shape_vec(shape, values).expect("Operation failed")
740    }
741}
742
743impl Random<rand::rngs::ThreadRng> {
744    /// Create a new random number generator with a specific seed
745    pub fn seed(seed: u64) -> Random<rand::rngs::StdRng> {
746        Random {
747            rng: rand::SeedableRng::seed_from_u64(seed),
748        }
749    }
750}
751
// Implement required traits for the legacy Random struct
impl<R: rand::RngCore> rand::RngCore for Random<R> {
    // Delegate 32-bit output to the wrapped generator.
    fn next_u32(&mut self) -> u32 {
        self.rng.next_u32()
    }

    // Delegate 64-bit output to the wrapped generator.
    fn next_u64(&mut self) -> u64 {
        self.rng.next_u64()
    }

    // Delegate byte-filling to the wrapped generator.
    fn fill_bytes(&mut self, dest: &mut [u8]) {
        self.rng.fill_bytes(dest)
    }
}
766
impl rand::SeedableRng for Random<rand::rngs::StdRng> {
    // Reuse StdRng's seed layout so the wrapper seeds identically.
    type Seed = <rand::rngs::StdRng as rand::SeedableRng>::Seed;

    // Build a reproducible generator from a full-width seed.
    fn from_seed(seed: Self::Seed) -> Self {
        Random {
            rng: rand::rngs::StdRng::from_seed(seed),
        }
    }

    // Build a reproducible generator from a single u64 seed.
    fn seed_from_u64(state: u64) -> Self {
        Random {
            rng: rand::rngs::StdRng::seed_from_u64(state),
        }
    }
}
782
783/// Thread-local random number generator for convenient access (legacy compatibility)
784use std::cell::RefCell;
thread_local! {
    // One lazily-initialized legacy `Random` per thread.
    static THREAD_RNG: RefCell<Random> = RefCell::new(Random::default());
}

/// Get a reference to the thread-local random number generator (legacy compatibility)
///
/// Runs `f` with exclusive mutable access to this thread's legacy
/// `Random` instance and returns whatever `f` produces.
#[allow(dead_code)]
pub fn get_rng<F, R>(f: F) -> R
where
    F: FnOnce(&mut Random) -> R,
{
    THREAD_RNG.with(|rng| f(&mut rng.borrow_mut()))
}
797
798/// Deterministic random sequence generator for testing (legacy compatibility)
799pub struct DeterministicSequence {
800    seed: u64,
801    counter: u64,
802}
803
804impl DeterministicSequence {
805    /// Create a new deterministic sequence with the given seed
806    pub fn seed(seed: u64) -> Self {
807        Self { seed, counter: 0 }
808    }
809
810    /// Generate the next value in the sequence
811    pub fn next_f64(&mut self) -> f64 {
812        // Simple deterministic hash function for testing purposes
813        let mut x = self.counter.wrapping_add(self.seed);
814        x = ((x >> 16) ^ x).wrapping_mul(0x45d9f3b);
815        x = ((x >> 16) ^ x).wrapping_mul(0x45d9f3b);
816        x = (x >> 16) ^ x;
817
818        self.counter = self.counter.wrapping_add(1);
819
820        // Convert to f64 in [0, 1) range
821        (x as f64) / (u64::MAX as f64)
822    }
823
824    /// Reset the sequence to its initial state
825    pub fn reset(&mut self) {
826        self.counter = 0;
827    }
828
829    /// Get a vector of deterministic values
830    pub fn get_vec(&mut self, size: usize) -> Vec<f64> {
831        (0..size).map(|_| self.next_f64()).collect()
832    }
833
834    /// Get an crate::ndarray::Array of deterministic values
835    pub fn get_array<Sh>(&mut self, shape: Sh) -> crate::ndarray::Array<f64, crate::ndarray::IxDyn>
836    where
837        Sh: Into<crate::ndarray::IxDyn>,
838    {
839        let shape = shape.into();
840        let size = shape.size();
841        let values = self.get_vec(size);
842        crate::ndarray::Array::from_shape_vec(shape, values).expect("Operation failed")
843    }
844}
845
846// ===============================
847// Enhanced Type Aliases & Exports
848// ===============================
849
/// Convenient type aliases for common RNG types
pub type ThreadRng = Random<rand::rngs::ThreadRng>;
/// Seeded, reproducible legacy wrapper around `rand`'s `StdRng`.
pub type StdRng = Random<rand::rngs::StdRng>;

/// Common distribution type aliases
pub type UniformDist = rand_distributions::Uniform<f64>;
/// Normal (Gaussian) distribution over f64.
pub type NormalDist = rand_distributions::Normal<f64>;
/// Exponential distribution over f64.
pub type ExponentialDist = rand_distributions::Exp<f64>;

/// Array type aliases for convenience
pub type Array1D<T> = crate::ndarray::Array1<T>;
/// 2-dimensional ndarray alias.
pub type Array2D<T> = crate::ndarray::Array2<T>;
/// 3-dimensional ndarray alias.
pub type Array3D<T> = crate::ndarray::Array3<T>;
863
864// ===============================
865// Workflow Module Aliases
866// ===============================
867
868/// Alias for quick access to rapid prototyping functions
869pub use quick as rapid;
870
871/// Alias for scientific computing workflows
872pub use scientific as research;
873
874/// Alias for machine learning workflows
875pub use ml as machine_learning;
876
877/// Alias for cryptographic random generation
878pub use secure as crypto;
879
880// ===============================
881// Legacy Compatibility Modules
882// ===============================
883
/// Legacy module structure for backward compatibility
pub mod quasi_monte_carlo {
    pub use crate::random::qmc::*;

    // Legacy type aliases for backward compatibility
    /// Old name for [`crate::random::qmc::SobolGenerator`].
    pub type SobolSequence = crate::random::qmc::SobolGenerator;
    /// Old name for [`crate::random::qmc::HaltonGenerator`].
    pub type HaltonSequence = crate::random::qmc::HaltonGenerator;
    /// Old name for [`crate::random::qmc::LatinHypercubeSampler`].
    pub type LatinHypercubeSampling = crate::random::qmc::LatinHypercubeSampler;
}
893
894/// Legacy module structure for backward compatibility
895pub mod specialized_distributions {
896    pub use crate::random::distributions::*;
897}
898
899/// Legacy module structure for backward compatibility
900pub mod optimized_arrays {
901    pub use crate::random::arrays::*;
902}
903
904/// Legacy slice operations
905pub mod slice_random {
906    pub use crate::random::slice_ops::convenience::*;
907}
908
909// ===============================
910// Enhanced Feature-Based Exports
911// ===============================
912
913/// All essential items for most use cases
914pub mod essentials {
915    pub use crate::random::rand_distributions::{Normal, Uniform};
916    pub use crate::random::{
917        random_normal_array, random_uniform_array, seeded_rng, thread_rng, Beta, Categorical,
918        Random, Rng, RngCore, SeedableRng, WeightedChoice,
919    };
920}
921
922/// Advanced statistical functionality
923pub mod statistics {
924    pub use crate::random::{
925        AntitheticSampling, Beta, Categorical, CommonRatio, ControlVariate, Dirichlet,
926        ExponentialDist, GammaDist, HaltonGenerator, LatinHypercubeSampler, MultivariateNormal,
927        SobolGenerator, VonMises, WeightedChoice,
928    };
929}
930
931/// High-performance computing functionality
932pub mod hpc {
933    pub use crate::random::{
934        random_he_weights, random_normal_array, random_uniform_array, random_xavier_weights,
935        BatchRng, DistributedRngPool, OptimizedArrayRandom, ParallelRng, ThreadLocalRngPool,
936    };
937}
938
939/// 🚀 **Cutting-edge algorithms**
940pub mod cutting_edge {
941    pub use crate::random::{
942        advanced_numerical::*, cutting_edge_mcmc::*, ecosystem_integration::*, neural_sampling::*,
943        quantum_inspired::*,
944    };
945}
946
947/// Advanced MCMC and Bayesian inference
948pub mod bayesian {
949    pub use crate::random::{
950        EllipticalSliceSampler, HamiltonianMonteCarlo, ImportanceSampler, NoUTurnSampler,
951        ParallelTempering, SteinVariationalGradientDescent,
952    };
953    // AdaptiveMetropolisHastings is available through the cutting_edge module
954}
955
956/// Neural and AI-based sampling methods
957pub mod ai_sampling {
958    pub use crate::random::{
959        DiffusionConfig, EnergyBasedModel, NeuralBridge, NeuralPosteriorEstimation,
960        NormalizingFlow, ScoreBasedDiffusion,
961    };
962}
963
964/// Quantum-inspired computational methods
965pub mod quantum {
966    pub use crate::random::{
967        CoinParameters, QuantumAmplitudeAmplification, QuantumInspiredAnnealing,
968        QuantumInspiredEvolutionary, QuantumWalk,
969    };
970}
971
972/// Advanced numerical methods and optimization
973pub mod numerical_methods {
974    pub use crate::random::{
975        AdaptiveResult, AdaptiveSampler, ImportanceResult, MLMCResult, MultiLevelMonteCarlo,
976        SequentialMonteCarlo,
977    };
978}
979
980/// Ecosystem integration and bridge utilities
981pub mod bridges {
982    pub use crate::random::{
983        AugmentationConfig, ExperimentalDesign, LinalgBridge, NeuralBridge, OptimizationBridge,
984        StatsBridge, SyntheticDataset,
985    };
986}