// torsh_functional/random_ops/basic.rs
//! Basic random number generation operations
//!
//! This module provides fundamental random number generation functions including
//! uniform distributions, normal distributions, and basic random sampling operations.
//! All operations follow PyTorch's random generation API for compatibility.

use scirs2_core::random::{Random, Rng};
use torsh_core::{Result as TorshResult, TorshError};
use torsh_tensor::Tensor;

11/// Generate tensor with values drawn from uniform distribution [0, 1)
12///
13/// ## Mathematical Background
14///
15/// The uniform distribution U(a,b) has probability density function:
16/// ```text
17/// f(x) = 1/(b-a)  for a ≤ x ≤ b
18///      = 0        otherwise
19/// ```text
20///
21/// Properties:
22/// - **Mean**: μ = (a+b)/2
23/// - **Variance**: σ² = (b-a)²/12
24/// - **Support**: [a, b) (a ≤ x < b)
25///
26/// ## Parameters
27/// * `shape` - Shape of the output tensor
28/// * `low` - Lower bound (inclusive, default 0.0)
29/// * `high` - Upper bound (exclusive, default 1.0)
30/// * `generator` - Optional random number generator seed
31///
32/// ## Returns
33/// * Tensor filled with uniformly distributed random values
34///
35/// ## Example
36/// ```rust
37/// # use torsh_functional::random_ops::rand;
38/// let tensor = rand(&[3, 4], None, None, Some(42))?; // [3, 4] tensor with values in [0, 1)
39/// let custom = rand(&[2, 2], Some(-1.0), Some(1.0), None)?; // Values in [-1, 1)
40/// # Ok::<(), Box<dyn std::error::Error>>(())
41/// ```text
42pub fn rand(
43    shape: &[usize],
44    low: Option<f32>,
45    high: Option<f32>,
46    generator: Option<u64>,
47) -> TorshResult<Tensor> {
48    let low = low.unwrap_or(0.0);
49    let high = high.unwrap_or(1.0);
50
51    uniform_(shape, low, high, generator)
52}
53
54/// Fill tensor with values drawn from uniform distribution
55///
56/// ## Mathematical Implementation
57///
58/// Uses the linear congruential generator method:
59/// ```text
60/// X_n+1 = (a × X_n + c) mod m
61/// U = X_n / m  ∈ [0, 1)
62/// Y = low + U × (high - low)  ∈ [low, high)
63/// ```text
64///
65/// ## Parameters
66/// * `shape` - Shape of the tensor
67/// * `low` - Lower bound (inclusive)
68/// * `high` - Upper bound (exclusive)
69/// * `generator` - Optional random number generator seed
70///
71/// ## Returns
72/// * Tensor filled with uniformly distributed values in [low, high)
73///
74/// ## Errors
75/// * Returns error if low >= high
76pub fn uniform_(
77    shape: &[usize],
78    low: f32,
79    high: f32,
80    generator: Option<u64>,
81) -> TorshResult<Tensor> {
82    if low >= high {
83        return Err(TorshError::InvalidArgument(
84            "uniform_: low must be less than high".to_string(),
85        ));
86    }
87
88    // Create RNG using SciRS2
89    let mut rng = if let Some(seed) = generator {
90        Random::seed(seed)
91    } else {
92        Random::seed(42) // Default seed for reproducible behavior
93    };
94
95    // Generate uniform distributed values using SciRS2
96    let size: usize = shape.iter().product();
97    let mut values = Vec::with_capacity(size);
98
99    for _ in 0..size {
100        // Use SciRS2's uniform generation
101        values.push(rng.random::<f32>() * (high - low) + low);
102    }
103
104    Tensor::from_vec(values, shape)
105}
106
107/// Generate tensor with values drawn from normal distribution
108///
109/// ## Mathematical Background
110///
111/// The normal (Gaussian) distribution N(μ, σ²) has probability density function:
112/// ```text
113/// f(x) = (1/(σ√(2π))) × exp(-½((x-μ)/σ)²)
114/// ```text
115///
116/// Properties:
117/// - **Mean**: μ
118/// - **Variance**: σ²
119/// - **Standard deviation**: σ
120/// - **Support**: (-∞, ∞)
121/// - **68-95-99.7 rule**: ~68% within μ±σ, ~95% within μ±2σ, ~99.7% within μ±3σ
122///
123/// ## Box-Muller Transformation
124///
125/// Converts uniform random variables to normal:
126/// ```text
127/// U₁, U₂ ~ Uniform(0,1)
128/// Z₀ = √(-2 ln U₁) × cos(2π U₂)
129/// Z₁ = √(-2 ln U₁) × sin(2π U₂)
130/// Z₀, Z₁ ~ N(0,1)
131/// X = μ + σZ  ~ N(μ, σ²)
132/// ```text
133///
134/// ## Parameters
135/// * `shape` - Shape of the tensor
136/// * `mean` - Mean of the normal distribution
137/// * `std` - Standard deviation of the normal distribution
138/// * `generator` - Optional random number generator seed
139///
140/// ## Returns
141/// * Tensor filled with normally distributed values N(mean, std²)
142pub fn normal_(
143    shape: &[usize],
144    mean: f32,
145    std: f32,
146    generator: Option<u64>,
147) -> TorshResult<Tensor> {
148    if std < 0.0 {
149        return Err(TorshError::InvalidArgument(
150            "normal_: std must be non-negative".to_string(),
151        ));
152    }
153
154    // Create RNG
155    let mut rng = if let Some(seed) = generator {
156        Random::seed(seed)
157    } else {
158        Random::seed(42) // Default seed for reproducible behavior
159    };
160
161    // Generate normal distributed values
162    let size: usize = shape.iter().product();
163    let mut values = Vec::with_capacity(size);
164
165    for _ in 0..size {
166        // Box-Muller transform for normal distribution
167        let u1: f32 = rng.gen_range(0.0..1.0);
168        let u2: f32 = rng.gen_range(0.0..1.0);
169        let z0 = (-2.0f32 * u1.ln()).sqrt() * (2.0 * std::f32::consts::PI * u2).cos();
170        values.push(mean + std * z0);
171    }
172
173    Tensor::from_vec(values, shape)
174}
175
176/// Generate tensor with values drawn from standard normal distribution
177///
178/// Equivalent to `normal_(shape, 0.0, 1.0, generator)`.
179/// This is a convenience function matching PyTorch's `randn()` API.
180///
181/// ## Standard Normal Distribution
182///
183/// The standard normal distribution N(0,1) is the foundation for all normal distributions:
184/// - **Mean**: 0
185/// - **Variance**: 1
186/// - **Standard deviation**: 1
187///
188/// Any normal distribution can be derived from standard normal:
189/// ```text
190/// X ~ N(μ, σ²)  ⟺  X = μ + σZ where Z ~ N(0,1)
191/// ```text
192///
193/// ## Parameters
194/// * `shape` - Shape of the output tensor
195/// * `mean` - Mean of the normal distribution (default 0.0)
196/// * `std` - Standard deviation of the normal distribution (default 1.0)
197/// * `generator` - Optional random number generator seed
198///
199/// ## Returns
200/// * Tensor filled with standard normally distributed values N(0,1)
201pub fn randn(
202    shape: &[usize],
203    mean: Option<f32>,
204    std: Option<f32>,
205    generator: Option<u64>,
206) -> TorshResult<Tensor> {
207    let mean = mean.unwrap_or(0.0);
208    let std = std.unwrap_or(1.0);
209
210    normal_(shape, mean, std, generator)
211}
212
213/// Generate random integers in the range [low, high)
214///
215/// ## Discrete Uniform Distribution
216///
217/// For integers in range \[a, b), each value has equal probability:
218/// ```text
219/// P(X = k) = 1/(b-a)  for k ∈ {a, a+1, ..., b-1}
220///          = 0        otherwise
221/// ```text
222///
223/// Properties:
224/// - **Mean**: μ = (a+b-1)/2
225/// - **Variance**: σ² = ((b-a)²-1)/12
226/// - **Support**: {a, a+1, ..., b-1}
227///
228/// ## Parameters
229/// * `shape` - Shape of the tensor
230/// * `low` - Lower bound (inclusive)
231/// * `high` - Upper bound (exclusive)
232/// * `generator` - Optional random number generator seed
233///
234/// ## Returns
235/// * Tensor filled with random integers in [low, high)
236///
237/// ## Errors
238/// * Returns error if low >= high
239pub fn randint_(
240    shape: &[usize],
241    low: i32,
242    high: i32,
243    generator: Option<u64>,
244) -> TorshResult<Tensor> {
245    if low >= high {
246        return Err(TorshError::InvalidArgument(
247            "randint_: low must be less than high".to_string(),
248        ));
249    }
250
251    // Create RNG
252    let mut rng = if let Some(seed) = generator {
253        Random::seed(seed)
254    } else {
255        Random::seed(42) // Default seed for reproducible behavior
256    };
257
258    let size: usize = shape.iter().product();
259    let mut values = Vec::with_capacity(size);
260
261    for _ in 0..size {
262        let val = rng.gen_range(low..high);
263        values.push(val as f32);
264    }
265
266    Tensor::from_vec(values, shape)
267}
268
269/// Generate random integers in the range [0, high)
270///
271/// Convenience function equivalent to `randint_(shape, 0, high, generator)`.
272///
273/// ## Parameters
274/// * `shape` - Shape of the tensor
275/// * `high` - Upper bound (exclusive)
276/// * `generator` - Optional random number generator seed
277///
278/// ## Returns
279/// * Tensor filled with random integers in [0, high)
280pub fn randint(shape: &[usize], high: i32, generator: Option<u64>) -> TorshResult<Tensor> {
281    randint_(shape, 0, high, generator)
282}
283
284/// Generate random permutation of integers from 0 to n-1
285///
286/// ## Mathematical Background
287///
288/// A random permutation is a bijective mapping π: {0,1,...,n-1} → {0,1,...,n-1}
289/// where each of the n! possible permutations has equal probability 1/n!.
290///
291/// ## Fisher-Yates Shuffle Algorithm
292///
293/// Modern implementation uses the Fisher-Yates shuffle for O(n) efficiency:
294/// ```text
295/// for i = n-1 down to 1:
296///     j = random(0, i+1)
297///     swap(array[i], array[j])
298/// ```text
299///
300/// This algorithm ensures:
301/// - **Unbiased**: Each permutation has probability 1/n!
302/// - **Efficient**: O(n) time complexity
303/// - **In-place**: O(1) additional space
304///
305/// ## Parameters
306/// * `n` - Number of elements to permute (generates 0, 1, ..., n-1)
307/// * `generator` - Optional random number generator seed
308///
309/// ## Returns
310/// * 1D tensor containing a random permutation of [0, 1, ..., n-1]
311///
312/// ## Applications
313/// - **Data shuffling**: Randomize dataset order for training
314/// - **Sampling**: Create random subsets without replacement
315/// - **Bootstrap methods**: Generate random resampling indices
316/// - **Monte Carlo**: Random ordering for statistical simulations
317pub fn randperm(n: usize, generator: Option<u64>) -> TorshResult<Tensor> {
318    if n == 0 {
319        return Tensor::from_vec(vec![], &[0]);
320    }
321
322    // Create initial sequence [0, 1, 2, ..., n-1]
323    let mut values: Vec<f32> = (0..n).map(|i| i as f32).collect();
324
325    // Create RNG
326    let mut rng = if let Some(seed) = generator {
327        Random::seed(seed)
328    } else {
329        Random::seed(42) // Default seed for reproducible behavior
330    };
331
332    // Fisher-Yates shuffle
333    for i in (1..n).rev() {
334        let j = rng.gen_range(0..=i);
335        values.swap(i, j);
336    }
337
338    Tensor::from_vec(values, &[n])
339}