Skip to main content

scirs2_core/random/
mod.rs

1//! Ultra-advanced random number generation for SCIRS2 ecosystem
2//!
3//! This module provides the most comprehensive and cutting-edge random number generation
4//! capabilities available, designed specifically for scientific computing, machine learning,
5//! and quantum-inspired algorithms with unparalleled features for reproducibility,
6//! performance, and specialized ultra-modern distributions.
7//!
8//! ## Quick Start
9//!
10//! ```rust
11//! // For quick prototyping - use the quick module
12//! use scirs2_core::random::quick::*;
13//! let x = random_f64();
14//! let data = random_vector(100); // Use smaller data for doc tests
15//!
16//! // For scientific computing - use the prelude
17//! use scirs2_core::random::prelude::*;
18//! let mut rng = thread_rng();
19//! let sample = rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"));
20//! ```
21//!
22//! ## Module Organization
23//!
24//! ### 🚀 **Cutting-Edge Modules**
25//! - [`cutting_edge_mcmc`] - HMC, NUTS, SVGD, and advanced MCMC methods
26//! - [`neural_sampling`] - Normalizing flows, VAE, diffusion models
27//! - [`quantum_inspired`] - Quantum algorithms for classical computation
28//! - [`advanced_numerical`] - Multi-level Monte Carlo, adaptive sampling
29//! - [`ecosystem_integration`] - Seamless SCIRS2 module interoperability
30//!
31//! ### 🎯 **Workflow-Based Modules**
32//! - [`prelude`] - Most commonly used items (Rust idiom)
33//! - [`quick`] - Rapid prototyping with minimal setup
34//! - [`scientific`] - Research and scientific computing workflows
35//! - [`ml`] - Machine learning specific utilities
36//!
//! ### ⚡ **Core Implementation Modules**
38//! - [`core`] - Core Random struct and fundamental operations
39//! - [`distributions`] - Advanced statistical distributions
40//! - [`arrays`] - Optimized bulk array generation
41//! - [`slice_ops`] - Enhanced slice operations and sampling
42//!
43//! ### 🔬 **Specialized Modules**
44//! - [`qmc`] - Quasi-Monte Carlo sequences (Sobol, Halton, LHS)
45//! - [`variance_reduction`] - Monte Carlo variance reduction techniques
46//! - [`secure`] - Cryptographically secure random generation
47//! - [`parallel`] - Thread-safe parallel random generation
48
49// Core random functionality
50pub mod core;
51pub mod seq;
52pub mod slice_ops;
53
54// Advanced distributions and sampling
55pub mod arrays;
56pub mod distributions;
57pub mod distributions_unified;
58
59// Monte Carlo and variance reduction
60pub mod qmc;
61pub mod variance_reduction;
62
63// Security and parallel computing
64pub mod parallel;
65pub mod secure;
66
67// Enhanced workflow-based modules
68pub mod ml;
69pub mod prelude;
70pub mod quick;
71pub mod scientific;
72
73// Cutting-edge modules
74pub mod advanced_numerical;
75pub mod cutting_edge_mcmc;
76pub mod ecosystem_integration;
77pub mod neural_sampling;
78pub mod quantum_inspired;
79
80// Re-export core functionality (except Random which we redefine for compatibility)
81// Use self::core to avoid ambiguity with Rust's standard library ::core (critical for Windows builds)
82pub use self::core::{seeded_rng, thread_rng, DistributionExt};
83
84// Re-export RNG types for SCIRS2 POLICY compliance
85pub use rand_chacha::{ChaCha12Rng, ChaCha20Rng, ChaCha8Rng};
86
87// Re-export the core Random as CoreRandom for internal use
88pub use self::core::Random as CoreRandom;
89
90// Re-export enhanced slice operations
91pub use slice_ops::{ScientificSliceRandom, SliceRandomExt};
92
93// Note: seq module is available as scirs2_core::random::seq
94
95// Re-export slice convenience functions under different name to avoid conflict
96// pub use slice_ops::convenience as slice_convenience;
97
98// Re-export specialized distributions
99pub use distributions::{
100    Beta, Categorical, Dirichlet, GammaDist, MultivariateNormal, VonMises, WeightedChoice,
101};
102
103// Re-export unified distribution interface for ecosystem compatibility
104pub use distributions_unified::{
105    UnifiedBeta, UnifiedBinomial, UnifiedCauchy, UnifiedChiSquared, UnifiedDirichlet,
106    UnifiedDistribution, UnifiedDistributionError, UnifiedExp, UnifiedFisherF, UnifiedGamma,
107    UnifiedLogNormal, UnifiedNormal, UnifiedPoisson, UnifiedStudentT, UnifiedWeibull,
108};
109
110// Re-export optimized array operations
111pub use arrays::{
112    random_exponential_array, random_gamma_array, random_he_weights, random_normal_array,
113    random_sparse_array, random_uniform_array, random_xavier_weights, OptimizedArrayRandom,
114};
115
116// Re-export variance reduction techniques
117pub use variance_reduction::{
118    AntitheticSampling, CommonRatio, ControlVariate, ImportanceSplitting,
119};
120
121// Re-export QMC sequences
122pub use qmc::{
123    HaltonGenerator, LatinHypercubeSampler, LowDiscrepancySequence, QmcError, SobolGenerator,
124};
125
126// Re-export secure random generation
127pub use secure::{utils as secure_utils, SecureRandom, SecureRngPool};
128
129// Re-export parallel operations
130pub use parallel::{BatchRng, DistributedRngPool, ParallelRng, ThreadLocalRngPool};
131
132// Re-export cutting-edge algorithms
133pub use advanced_numerical::{
134    AdaptiveResult, AdaptiveSampler, ImportanceResult, ImportanceSampler, MLMCResult,
135    MultiLevelMonteCarlo, SequentialMonteCarlo,
136};
137
138pub use cutting_edge_mcmc::{
139    EllipticalSliceSampler, HamiltonianMonteCarlo, NoUTurnSampler, ParallelTempering,
140    SteinVariationalGradientDescent,
141};
142
143pub use neural_sampling::{
144    DiffusionConfig, EnergyBasedModel, NeuralPosteriorEstimation, NormalizingFlow,
145    ScoreBasedDiffusion,
146};
147
148pub use quantum_inspired::{
149    CoinParameters, QuantumAmplitudeAmplification, QuantumInspiredAnnealing,
150    QuantumInspiredEvolutionary, QuantumWalk,
151};
152
153pub use ecosystem_integration::{
154    AugmentationConfig, ExperimentalDesign, LinalgBridge, NeuralBridge, OptimizationBridge,
155    StatsBridge, SyntheticDataset,
156};
157
158// Re-export external dependencies for convenience
159pub use ::ndarray::Dimension;
160pub use rand::prelude as rand_prelude;
161/// Backward-compatible re-export: `RngCore` is deprecated in rand_core 0.10
162/// (auto-implemented for all `Rng`). Kept for downstream crate compatibility.
163#[allow(deprecated)]
164pub use rand::rand_core::RngCore;
165pub use rand::rngs;
166pub use rand::seq::SliceRandom;
167pub use rand::{Rng, RngExt, SeedableRng, TryRng};
168pub use rand_distr as rand_distributions;
169pub use rand_distr::uniform;
170
171/// Convenience function to generate a random value of the inferred type
172///
173/// This function generates a random value using the thread-local RNG.
174/// The type is inferred from context, or can be specified explicitly.
175///
176/// # Examples
177///
178/// ```
179/// use scirs2_core::random::random;
180///
181/// // Generate random f64
182/// let x: f64 = random();
183/// assert!(x >= 0.0 && x < 1.0);
184///
185/// // Generate random bool
186/// let b: bool = random();
187///
188/// // Explicit type annotation
189/// let y = random::<f32>();
190/// ```
191pub fn random<T>() -> T
192where
193    rand::distr::StandardUniform: rand::distr::Distribution<T>,
194{
195    rand::random()
196}
197
/// Convenience function to create a thread-local RNG
///
/// This is equivalent to `thread_rng()` but provides a shorter name
/// for compatibility with code that uses `rng()`.
///
/// # Examples
///
/// ```
/// use scirs2_core::random::rng;
/// use scirs2_core::random::{Rng, RngExt};
///
/// let mut rng = rng();
/// let x: f64 = rng.random();
/// ```
pub fn rng() -> rand::rngs::ThreadRng {
    // Thin delegation to the upstream `rand` thread-local generator.
    rand::rng()
}
215
216// Comprehensive re-export of ALL rand_distr distributions for SciRS2 ecosystem compatibility
217// This ensures other projects can access any distribution through scirs2-core
218pub use rand_distr::{
219    // Other distributions
220    Alphanumeric,
221    // Discrete distributions
222    Bernoulli as RandBernoulli,
223    // Continuous distributions
224    Beta as RandBeta,
225    Binomial,
226    Cauchy,
227    ChiSquared,
228    // Multivariate distributions (Dirichlet moved to rand_distr::multi in 0.6)
229    // Distribution trait
230    Distribution,
231    Exp,
232    FisherF,
233    Gamma as RandGamma,
234    Geometric,
235    Hypergeometric,
236    InverseGaussian,
237    LogNormal,
238    Normal as RandNormal,
239    Open01,
240    OpenClosed01,
241    Pareto,
242    Pert,
243    Poisson,
244    StandardNormal,
245    StudentT,
246    Triangular,
247    Uniform as RandUniform,
248    UnitBall,
249    UnitCircle,
250    UnitDisc,
251    UnitSphere,
252    Weibull,
253    Zeta,
254    Zipf,
255};
256
257// Re-export WeightedIndex from weighted submodule
258pub use rand_distr::weighted::WeightedIndex;
259
260// Dirichlet moved to rand_distr::multi in rand_distr 0.6
261pub use rand_distr::multi::Dirichlet as RandDirichlet;
262
263// Clean, unprefixed type aliases for common distributions (for easier use)
264// These allow `use scirs2_core::random::Normal;` instead of `use scirs2_core::random::RandNormal;`
265pub use rand_distr::Bernoulli;
266pub use rand_distr::Exp as Exponential; // Exponential is just Exp in rand_distr
267pub use rand_distr::Gamma;
268pub use rand_distr::Normal;
269pub use rand_distr::Uniform;
270
271// Re-export ndarray-rand RandomExt trait if available
272#[cfg(feature = "random")]
273pub use ndarray_rand::RandomExt;
274
// Compatibility layer for systems without random feature
/// Minimal stand-in for the `ndarray-rand` `RandomExt` trait, compiled only
/// when the `random` feature is disabled so downstream code keeps building.
#[cfg(not(feature = "random"))]
pub trait RandomExt<T, D> {
    /// Build `Self` with the given `shape`, filling every element with an
    /// independent draw from `distribution` using the supplied `rng`.
    fn random_using<R: rand::Rng>(
        shape: D,
        distribution: impl rand_distr::Distribution<T>,
        rng: &mut R,
    ) -> Self;
}
284
285#[cfg(not(feature = "random"))]
286impl<T, D> RandomExt<T, D> for crate::ndarray::ArrayBase<crate::ndarray::OwnedRepr<T>, D>
287where
288    D: crate::ndarray::Dimension,
289{
290    fn random_using<R: rand::Rng>(
291        shape: D,
292        distribution: impl rand_distr::Distribution<T>,
293        rng: &mut R,
294    ) -> Self {
295        let size = shape.size();
296        let mut data = Vec::with_capacity(size);
297        for _ in 0..size {
298            data.push(distribution.sample(rng));
299        }
300        Self::from_shape_vec(shape, data).expect("Operation failed")
301    }
302}
303
304/// Legacy compatibility functions for backward compatibility
305pub mod legacy {
306    use super::*;
307    use rand_distr::Uniform;
308
309    /// Compatibility wrapper for updated rand API
310    pub fn rng() -> Random<rand::rngs::ThreadRng> {
311        Random { rng: rand::rng() }
312    }
313
314    /// Generate a random f64 value between 0.0 and 1.0
315    pub fn f64() -> f64 {
316        rand::random::<f64>()
317    }
318
319    /// Generate a random f32 value between 0.0 and 1.0
320    pub fn f32() -> f32 {
321        rand::random::<f32>()
322    }
323
324    /// Generate a random usize value in the given range
325    pub fn usize(range: std::ops::Range<usize>) -> usize {
326        rand::rng().random_range(range)
327    }
328}
329
330/// High-level convenience functions for common operations
331pub mod convenience {
332    use super::*;
333    use ::ndarray::{Array, Dimension, IxDyn};
334    use rand_distr::{Distribution, Normal, Uniform};
335
336    /// Generate a uniform random number in [0, 1)
337    pub fn uniform() -> f64 {
338        let mut rng = thread_rng();
339        rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed"))
340    }
341
342    /// Generate a standard normal random number (mean=0, std=1)
343    pub fn normal() -> f64 {
344        let mut rng = thread_rng();
345        rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"))
346    }
347
348    /// Generate a random integer in the given range
349    pub fn integer(min: i64, max: i64) -> i64 {
350        let mut rng = thread_rng();
351        rng.sample(Uniform::new_inclusive(min, max).expect("Operation failed"))
352    }
353
354    /// Generate a random boolean
355    pub fn boolean() -> bool {
356        let mut rng = thread_rng();
357        rng.random_bool(0.5)
358    }
359
360    /// Generate a random array with uniform distribution
361    pub fn uniform_array<Sh: Into<IxDyn>>(shape: Sh) -> Array<f64, IxDyn> {
362        let mut rng = thread_rng();
363        let shape = shape.into();
364        let size = shape.size();
365        let values: Vec<f64> = (0..size)
366            .map(|_| rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed")))
367            .collect();
368        Array::from_shape_vec(shape, values).expect("Operation failed")
369    }
370
371    /// Generate a random array with normal distribution
372    pub fn normal_array<Sh: Into<IxDyn>>(shape: Sh, mean: f64, std: f64) -> Array<f64, IxDyn> {
373        let mut rng = thread_rng();
374        let shape = shape.into();
375        let size = shape.size();
376        let values: Vec<f64> = (0..size)
377            .map(|_| rng.sample(Normal::new(mean, std).expect("Operation failed")))
378            .collect();
379        Array::from_shape_vec(shape, values).expect("Operation failed")
380    }
381}
382
383/// Sampling utilities for common statistical operations
384pub mod sampling {
385    use super::*;
386    use ::ndarray::{Array, Dimension, IxDyn};
387    use rand_distr::{Distribution, Exp, LogNormal, Normal, Uniform};
388
389    /// Sample uniformly from [0, 1)
390    pub fn random_uniform01<R: rand::Rng>(rng: &mut Random<R>) -> f64 {
391        rng.sample(Uniform::new(0.0, 1.0).expect("Operation failed"))
392    }
393
394    /// Sample from a standard normal distribution (mean 0, std dev 1)
395    pub fn random_standard_normal<R: rand::Rng>(rng: &mut Random<R>) -> f64 {
396        rng.sample(Normal::new(0.0, 1.0).expect("Operation failed"))
397    }
398
399    /// Sample from a normal distribution with given mean and standard deviation
400    pub fn random_normal<R: rand::Rng>(rng: &mut Random<R>, mean: f64, stddev: f64) -> f64 {
401        rng.sample(Normal::new(mean, stddev).expect("Operation failed"))
402    }
403
404    /// Sample from a log-normal distribution
405    pub fn random_lognormal<R: rand::Rng>(rng: &mut Random<R>, mean: f64, stddev: f64) -> f64 {
406        rng.sample(LogNormal::new(mean, stddev).expect("Operation failed"))
407    }
408
409    /// Sample from an exponential distribution
410    pub fn random_exponential<R: rand::Rng>(rng: &mut Random<R>, lambda: f64) -> f64 {
411        rng.sample(Exp::new(lambda).expect("Operation failed"))
412    }
413
414    /// Generate an array of random integers in a range
415    pub fn random_integers<R: rand::Rng, Sh>(
416        rng: &mut Random<R>,
417        min: i64,
418        max: i64,
419        shape: Sh,
420    ) -> Array<i64, IxDyn>
421    where
422        Sh: Into<IxDyn>,
423    {
424        rng.sample_array(
425            Uniform::new_inclusive(min, max).expect("Operation failed"),
426            shape,
427        )
428    }
429
430    /// Generate an array of random floating-point values in a range
431    pub fn random_floats<R: rand::Rng, Sh>(
432        rng: &mut Random<R>,
433        min: f64,
434        max: f64,
435        shape: Sh,
436    ) -> Array<f64, IxDyn>
437    where
438        Sh: Into<IxDyn>,
439    {
440        rng.sample_array(Uniform::new(min, max).expect("Operation failed"), shape)
441    }
442
443    /// Sample indices for bootstrapping (sampling with replacement)
444    pub fn bootstrap_indices<R: rand::Rng>(
445        rng: &mut Random<R>,
446        data_size: usize,
447        sample_size: usize,
448    ) -> Vec<usize> {
449        let dist = Uniform::new(0, data_size).expect("Operation failed");
450        rng.sample_vec(dist, sample_size)
451    }
452
453    /// Sample indices without replacement (for random subsampling)
454    pub fn sample_without_replacement<R: rand::Rng>(
455        rng: &mut Random<R>,
456        data_size: usize,
457        sample_size: usize,
458    ) -> Vec<usize> {
459        use rand::seq::SliceRandom;
460        let mut indices: Vec<usize> = (0..data_size).collect();
461        indices.shuffle(&mut rng.rng);
462        indices.truncate(sample_size);
463        indices
464    }
465}
466
467/// Importance sampling methods for efficient estimation
468pub mod importance_sampling {
469    use super::*;
470    use rand_distr::{Normal, Uniform};
471
472    /// Importance sampling estimator
473    #[derive(Debug)]
474    pub struct ImportanceSampler<R: rand::Rng> {
475        rng: Random<R>,
476    }
477
478    impl<R: rand::Rng> ImportanceSampler<R> {
479        /// Create a new importance sampler
480        pub fn new(rng: Random<R>) -> Self {
481            Self { rng }
482        }
483
484        /// Perform importance sampling with a given proposal distribution
485        pub fn sample_with_weights<F, G>(
486            &mut self,
487            target_pdf: F,
488            proposal_pdf: G,
489            proposal_sampler: impl Fn(&mut Random<R>) -> f64,
490            n_samples: usize,
491        ) -> (Vec<f64>, Vec<f64>)
492        where
493            F: Fn(f64) -> f64,
494            G: Fn(f64) -> f64,
495        {
496            let mut samples = Vec::with_capacity(n_samples);
497            let mut weights = Vec::with_capacity(n_samples);
498
499            for _ in 0..n_samples {
500                let sample = proposal_sampler(&mut self.rng);
501                let weight = target_pdf(sample) / proposal_pdf(sample);
502
503                samples.push(sample);
504                weights.push(weight);
505            }
506
507            (samples, weights)
508        }
509
510        /// Estimate expectation using importance sampling
511        pub fn estimate_expectation<F, G, H>(
512            &mut self,
513            function: F,
514            target_pdf: G,
515            proposal_pdf: H,
516            proposal_sampler: impl Fn(&mut Random<R>) -> f64,
517            n_samples: usize,
518        ) -> f64
519        where
520            F: Fn(f64) -> f64,
521            G: Fn(f64) -> f64,
522            H: Fn(f64) -> f64,
523        {
524            let (samples, weights) =
525                self.sample_with_weights(target_pdf, proposal_pdf, proposal_sampler, n_samples);
526
527            let weighted_sum: f64 = samples
528                .iter()
529                .zip(weights.iter())
530                .map(|(&x, &w)| function(x) * w)
531                .sum();
532
533            let weight_sum: f64 = weights.iter().sum();
534
535            weighted_sum / weight_sum
536        }
537
538        /// Adaptive importance sampling with mixture proposal
539        pub fn adaptive_sampling<F>(
540            &mut self,
541            target_log_pdf: F,
542            initial_samples: usize,
543            adaptation_rounds: usize,
544        ) -> Vec<f64>
545        where
546            F: Fn(f64) -> f64,
547        {
548            let mut samples = Vec::new();
549            let mut proposal_mean: f64 = 0.0;
550            let mut proposal_std: f64 = 1.0;
551
552            for round in 0..adaptation_rounds {
553                let round_samples = if round == 0 {
554                    initial_samples
555                } else {
556                    initial_samples / 2
557                };
558                let normal_dist =
559                    Normal::new(proposal_mean, proposal_std).expect("Operation failed");
560
561                let mut round_sample_vec = Vec::new();
562                let mut weights = Vec::new();
563
564                for _ in 0..round_samples {
565                    let sample = self.rng.sample(normal_dist);
566
567                    // Manual calculation of log PDF for normal distribution
568                    let normal_log_pdf = -0.5 * ((sample - proposal_mean) / proposal_std).powi(2)
569                        - 0.5 * (2.0 * std::f64::consts::PI).ln()
570                        - proposal_std.ln();
571                    let log_weight = target_log_pdf(sample) - normal_log_pdf;
572
573                    round_sample_vec.push(sample);
574                    weights.push(log_weight.exp());
575                }
576
577                // Update proposal parameters based on weighted samples
578                let weight_sum: f64 = weights.iter().sum();
579                if weight_sum > 0.0 {
580                    let normalized_weights: Vec<f64> =
581                        weights.iter().map(|w| w / weight_sum).collect();
582
583                    proposal_mean = round_sample_vec
584                        .iter()
585                        .zip(normalized_weights.iter())
586                        .map(|(&x, &w)| x * w)
587                        .sum();
588
589                    let variance = round_sample_vec
590                        .iter()
591                        .zip(normalized_weights.iter())
592                        .map(|(&x, &w)| w * (x - proposal_mean).powi(2))
593                        .sum::<f64>();
594
595                    proposal_std = variance.sqrt().max(0.1); // Prevent collapse
596                }
597
598                samples.extend(round_sample_vec);
599            }
600
601            samples
602        }
603    }
604
605    impl ImportanceSampler<rand::rngs::ThreadRng> {
606        /// Create importance sampler with default RNG
607        pub fn with_default_rng() -> Self {
608            Self::new(Random::default())
609        }
610    }
611}
612
/// GPU-accelerated random number generation (when available)
#[cfg(feature = "gpu")]
pub mod gpu {
    /// Placeholder handle for future GPU-accelerated RNG support; carries no
    /// state and performs no GPU work yet.
    pub struct GpuRng;

    impl GpuRng {
        /// Construct the placeholder GPU RNG handle.
        pub fn new() -> Self {
            GpuRng
        }
    }

    impl Default for GpuRng {
        fn default() -> Self {
            GpuRng::new()
        }
    }
}
632
/// Legacy Random struct wrapper for backward compatibility
/// This provides the same interface as the original Random struct
/// while delegating to the new modular implementation
#[derive(Debug)]
pub struct Random<R: rand::Rng + ?Sized = rand::rngs::ThreadRng> {
    // Wrapped RNG; pub(crate) so sibling modules (e.g. `sampling`) can reach it.
    pub(crate) rng: R,
}
640
641impl Default for Random<rand::rngs::ThreadRng> {
642    fn default() -> Self {
643        Self { rng: rand::rng() }
644    }
645}
646
647impl<R: rand::Rng + Clone> Clone for Random<R> {
648    fn clone(&self) -> Self {
649        Self {
650            rng: self.rng.clone(),
651        }
652    }
653}
654
655impl<R: rand::Rng> Random<R> {
656    /// Sample a value from a distribution
657    pub fn sample<D, T>(&mut self, distribution: D) -> T
658    where
659        D: rand_distr::Distribution<T>,
660    {
661        use rand_distr::Distribution;
662        distribution.sample(&mut self.rng)
663    }
664
665    /// Generate a random value between two bounds (inclusive min, exclusive max)
666    pub fn random_range_bounds<T: rand_distr::uniform::SampleUniform + PartialOrd + Copy>(
667        &mut self,
668        min: T,
669        max: T,
670    ) -> T {
671        self.sample(rand_distr::Uniform::new(min, max).expect("Operation failed"))
672    }
673
674    /// Generate a random value within the given range (using range syntax)
675    pub fn gen_range<T, RNG>(&mut self, range: RNG) -> T
676    where
677        T: rand_distr::uniform::SampleUniform,
678        RNG: rand_distr::uniform::SampleRange<T>,
679    {
680        rand::RngExt::random_range(&mut self.rng, range)
681    }
682
683    /// Generate a random value within the given range (rand-compatible range syntax)
684    pub fn random_range<T, RNG>(&mut self, range: RNG) -> T
685    where
686        T: rand_distr::uniform::SampleUniform,
687        RNG: rand_distr::uniform::SampleRange<T>,
688    {
689        rand::RngExt::random_range(&mut self.rng, range)
690    }
691
692    /// Generate a random f64 value between 0.0 and 1.0
693    pub fn random_f64(&mut self) -> f64 {
694        self.sample(rand_distr::Uniform::new(0.0, 1.0).expect("Operation failed"))
695    }
696
697    /// Generate a random f64 value using the underlying RNG (convenience method)
698    pub fn random_f64_raw(&mut self) -> f64 {
699        rand::RngExt::random(&mut self.rng)
700    }
701
702    /// Generate a random boolean value
703    pub fn random_bool(&mut self) -> bool {
704        use rand_distr::Distribution;
705        let dist = rand_distr::Bernoulli::new(0.5).expect("Operation failed");
706        dist.sample(&mut self.rng)
707    }
708
709    /// Generate a random boolean with the given probability of being true
710    pub fn random_bool_with_chance(&mut self, prob: f64) -> bool {
711        use rand_distr::Distribution;
712        let dist = rand_distr::Bernoulli::new(prob).expect("Operation failed");
713        dist.sample(&mut self.rng)
714    }
715
716    /// Shuffle a slice randomly
717    pub fn shuffle<T>(&mut self, slice: &mut [T]) {
718        use rand::seq::SliceRandom;
719        slice.shuffle(&mut self.rng);
720    }
721
722    /// Generate a vector of values sampled from a distribution
723    pub fn sample_vec<D, T>(&mut self, distribution: D, size: usize) -> Vec<T>
724    where
725        D: rand_distr::Distribution<T> + Copy,
726    {
727        (0..size)
728            .map(|_| distribution.sample(&mut self.rng))
729            .collect()
730    }
731
732    /// Generate an crate::ndarray::Array from samples of a distribution
733    pub fn sample_array<D, T, Sh>(
734        &mut self,
735        distribution: D,
736        shape: Sh,
737    ) -> crate::ndarray::Array<T, crate::ndarray::IxDyn>
738    where
739        D: rand_distr::Distribution<T> + Copy,
740        Sh: Into<crate::ndarray::IxDyn>,
741    {
742        let shape = shape.into();
743        let size = shape.size();
744        let values = self.sample_vec(distribution, size);
745        crate::ndarray::Array::from_shape_vec(shape, values).expect("Operation failed")
746    }
747}
748
749impl Random<rand::rngs::ThreadRng> {
750    /// Create a new random number generator with a specific seed
751    pub fn seed(seed: u64) -> Random<rand::rngs::StdRng> {
752        Random {
753            rng: rand::SeedableRng::seed_from_u64(seed),
754        }
755    }
756}
757
// Implement TryRng for the legacy Random struct (rand_core 0.10 trait hierarchy).
// TryRng<Error=Infallible> auto-provides Rng and (deprecated) RngCore.
impl<R: rand::Rng> rand::TryRng for Random<R> {
    // The wrapped RNG cannot fail, so the error type is uninhabited.
    type Error = std::convert::Infallible;

    fn try_next_u32(&mut self) -> Result<u32, Self::Error> {
        // Delegate to the wrapped RNG; always Ok.
        Ok(self.rng.next_u32())
    }

    fn try_next_u64(&mut self) -> Result<u64, Self::Error> {
        // Delegate to the wrapped RNG; always Ok.
        Ok(self.rng.next_u64())
    }

    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Self::Error> {
        // Fill `dest` entirely from the wrapped RNG; always Ok.
        self.rng.fill_bytes(dest);
        Ok(())
    }
}
776
777impl rand::SeedableRng for Random<rand::rngs::StdRng> {
778    type Seed = <rand::rngs::StdRng as rand::SeedableRng>::Seed;
779
780    fn from_seed(seed: Self::Seed) -> Self {
781        Random {
782            rng: rand::rngs::StdRng::from_seed(seed),
783        }
784    }
785
786    fn seed_from_u64(state: u64) -> Self {
787        Random {
788            rng: rand::rngs::StdRng::seed_from_u64(state),
789        }
790    }
791}
792
/// Thread-local random number generator for convenient access (legacy compatibility)
use std::cell::RefCell;
thread_local! {
    // One lazily initialized `Random<ThreadRng>` per thread; accessed via `get_rng`.
    static THREAD_RNG: RefCell<Random> = RefCell::new(Random::default());
}
798
799/// Get a reference to the thread-local random number generator (legacy compatibility)
800#[allow(dead_code)]
801pub fn get_rng<F, R>(f: F) -> R
802where
803    F: FnOnce(&mut Random) -> R,
804{
805    THREAD_RNG.with(|rng| f(&mut rng.borrow_mut()))
806}
807
/// Deterministic random sequence generator for testing (legacy compatibility)
pub struct DeterministicSequence {
    // Seed mixed into every hash; fixed for the lifetime of the sequence.
    seed: u64,
    // Number of values produced so far; advancing it advances the sequence.
    counter: u64,
}
813
814impl DeterministicSequence {
815    /// Create a new deterministic sequence with the given seed
816    pub fn seed(seed: u64) -> Self {
817        Self { seed, counter: 0 }
818    }
819
820    /// Generate the next value in the sequence
821    pub fn next_f64(&mut self) -> f64 {
822        // Simple deterministic hash function for testing purposes
823        let mut x = self.counter.wrapping_add(self.seed);
824        x = ((x >> 16) ^ x).wrapping_mul(0x45d9f3b);
825        x = ((x >> 16) ^ x).wrapping_mul(0x45d9f3b);
826        x = (x >> 16) ^ x;
827
828        self.counter = self.counter.wrapping_add(1);
829
830        // Convert to f64 in [0, 1) range
831        (x as f64) / (u64::MAX as f64)
832    }
833
834    /// Reset the sequence to its initial state
835    pub fn reset(&mut self) {
836        self.counter = 0;
837    }
838
839    /// Get a vector of deterministic values
840    pub fn get_vec(&mut self, size: usize) -> Vec<f64> {
841        (0..size).map(|_| self.next_f64()).collect()
842    }
843
844    /// Get an crate::ndarray::Array of deterministic values
845    pub fn get_array<Sh>(&mut self, shape: Sh) -> crate::ndarray::Array<f64, crate::ndarray::IxDyn>
846    where
847        Sh: Into<crate::ndarray::IxDyn>,
848    {
849        let shape = shape.into();
850        let size = shape.size();
851        let values = self.get_vec(size);
852        crate::ndarray::Array::from_shape_vec(shape, values).expect("Operation failed")
853    }
854}
855
856// ===============================
857// Enhanced Type Aliases & Exports
858// ===============================
859
/// Convenient type aliases for common RNG types
pub type ThreadRng = Random<rand::rngs::ThreadRng>;
/// Legacy `Random` wrapper around the seedable standard RNG.
pub type StdRng = Random<rand::rngs::StdRng>;

/// Common distribution type aliases
pub type UniformDist = rand_distributions::Uniform<f64>;
/// `f64` normal distribution alias.
pub type NormalDist = rand_distributions::Normal<f64>;
/// `f64` exponential distribution alias.
pub type ExponentialDist = rand_distributions::Exp<f64>;

/// Array type aliases for convenience
pub type Array1D<T> = crate::ndarray::Array1<T>;
/// 2-D owned array alias.
pub type Array2D<T> = crate::ndarray::Array2<T>;
/// 3-D owned array alias.
pub type Array3D<T> = crate::ndarray::Array3<T>;
873
874// ===============================
875// Workflow Module Aliases
876// ===============================
877
878/// Alias for quick access to rapid prototyping functions
879pub use quick as rapid;
880
881/// Alias for scientific computing workflows
882pub use scientific as research;
883
884/// Alias for machine learning workflows
885pub use ml as machine_learning;
886
887/// Alias for cryptographic random generation
888pub use secure as crypto;
889
890// ===============================
891// Legacy Compatibility Modules
892// ===============================
893
/// Legacy module structure for backward compatibility
pub mod quasi_monte_carlo {
    pub use crate::random::qmc::*;

    // Legacy type aliases for backward compatibility
    /// Old name for [`crate::random::qmc::SobolGenerator`].
    pub type SobolSequence = crate::random::qmc::SobolGenerator;
    /// Old name for [`crate::random::qmc::HaltonGenerator`].
    pub type HaltonSequence = crate::random::qmc::HaltonGenerator;
    /// Old name for [`crate::random::qmc::LatinHypercubeSampler`].
    pub type LatinHypercubeSampling = crate::random::qmc::LatinHypercubeSampler;
}

/// Legacy module structure for backward compatibility
pub mod specialized_distributions {
    // Contents now live in `random::distributions`; re-export wholesale.
    pub use crate::random::distributions::*;
}

/// Legacy module structure for backward compatibility
pub mod optimized_arrays {
    // Contents now live in `random::arrays`; re-export wholesale.
    pub use crate::random::arrays::*;
}

/// Legacy slice operations
pub mod slice_random {
    // Convenience helpers moved under `slice_ops::convenience`.
    pub use crate::random::slice_ops::convenience::*;
}
918
919// ===============================
920// Enhanced Feature-Based Exports
921// ===============================
922
/// All essential items for most use cases
///
/// Curated re-exports so `use ...::essentials::*;` covers typical workflows.
pub mod essentials {
    pub use crate::random::rand_distributions::{Normal, Uniform};
    pub use crate::random::{
        random_normal_array, random_uniform_array, seeded_rng, thread_rng, Beta, Categorical,
        Random, Rng, SeedableRng, WeightedChoice,
    };
}

/// Advanced statistical functionality
///
/// Distributions, variance-reduction helpers, and QMC generators in one place.
pub mod statistics {
    pub use crate::random::{
        AntitheticSampling, Beta, Categorical, CommonRatio, ControlVariate, Dirichlet,
        ExponentialDist, GammaDist, HaltonGenerator, LatinHypercubeSampler, MultivariateNormal,
        SobolGenerator, VonMises, WeightedChoice,
    };
}

/// High-performance computing functionality
///
/// Bulk array generation and parallel/distributed RNG pools.
pub mod hpc {
    pub use crate::random::{
        random_he_weights, random_normal_array, random_uniform_array, random_xavier_weights,
        BatchRng, DistributedRngPool, OptimizedArrayRandom, ParallelRng, ThreadLocalRngPool,
    };
}

/// 🚀 **Cutting-edge algorithms**
///
/// Glob re-exports of every experimental algorithm module.
pub mod cutting_edge {
    pub use crate::random::{
        advanced_numerical::*, cutting_edge_mcmc::*, ecosystem_integration::*, neural_sampling::*,
        quantum_inspired::*,
    };
}

/// Advanced MCMC and Bayesian inference
pub mod bayesian {
    pub use crate::random::{
        EllipticalSliceSampler, HamiltonianMonteCarlo, ImportanceSampler, NoUTurnSampler,
        ParallelTempering, SteinVariationalGradientDescent,
    };
    // AdaptiveMetropolisHastings is available through the cutting_edge module
}

/// Neural and AI-based sampling methods
pub mod ai_sampling {
    pub use crate::random::{
        DiffusionConfig, EnergyBasedModel, NeuralBridge, NeuralPosteriorEstimation,
        NormalizingFlow, ScoreBasedDiffusion,
    };
}

/// Quantum-inspired computational methods
pub mod quantum {
    pub use crate::random::{
        CoinParameters, QuantumAmplitudeAmplification, QuantumInspiredAnnealing,
        QuantumInspiredEvolutionary, QuantumWalk,
    };
}

/// Advanced numerical methods and optimization
pub mod numerical_methods {
    pub use crate::random::{
        AdaptiveResult, AdaptiveSampler, ImportanceResult, MLMCResult, MultiLevelMonteCarlo,
        SequentialMonteCarlo,
    };
}

/// Ecosystem integration and bridge utilities
pub mod bridges {
    pub use crate::random::{
        AugmentationConfig, ExperimentalDesign, LinalgBridge, NeuralBridge, OptimizationBridge,
        StatsBridge, SyntheticDataset,
    };
}