scirs2-neural 0.4.3

Neural network building blocks module for SciRS2 (scirs2-neural) - Minimal Version
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
//! Cross-validation utilities
//!
//! This module provides utilities for cross-validation of models.

use super::{EvaluationConfig, Evaluator, MetricType, ModelBuilder};
use crate::data::Dataset;
use crate::error::{NeuralError, Result};
use crate::layers::Layer;
use scirs2_core::ndarray::ScalarOperand;
use scirs2_core::numeric::{Float, FromPrimitive, NumAssign};
use scirs2_core::random::SeedableRng;
use std::collections::HashMap;
use std::fmt::Debug;

/// Cross-validation strategy
///
/// Selects how `CrossValidator::create_folds` partitions dataset indices
/// into training/validation splits.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum CrossValidationStrategy {
    /// K-fold cross-validation: the dataset is split into `k` folds of
    /// near-equal size. Payload is `k`; must be at least 2.
    KFold(usize),
    /// Stratified K-fold cross-validation: indices are distributed to folds
    /// per class so each fold keeps similar class proportions. Payload is `k`.
    StratifiedKFold(usize),
    /// Leave-one-out cross-validation: one fold per sample, with that single
    /// sample as the validation set.
    LeaveOneOut,
    /// Leave-P-out cross-validation: each fold validates on `p` samples.
    /// Payload is `p`; must be less than the dataset size.
    LeavePOut(usize),
    /// Random shuffling cross-validation: payload is the number of splits and
    /// the validation fraction (strictly between 0 and 1).
    ShuffleSplit(usize, f64),
}

/// Configuration for cross-validation
#[derive(Debug, Clone)]
pub struct CrossValidationConfig {
    /// Cross-validation strategy used to build the folds
    pub strategy: CrossValidationStrategy,
    /// Whether to shuffle the data before splitting (applies to K-fold and
    /// stratified K-fold; shuffle-split always shuffles)
    pub shuffle: bool,
    /// Random seed for shuffling; `None` falls back to a non-seeded RNG
    pub random_seed: Option<u64>,
    /// Batch size for evaluation
    pub batch_size: usize,
    /// Number of workers for data loading
    pub num_workers: usize,
    /// Metrics to compute during evaluation
    pub metrics: Vec<MetricType>,
    /// Verbosity level: 0 is silent; greater than 0 prints per-fold progress
    /// and a mean ± std summary of each metric
    pub verbose: usize,
}

impl Default for CrossValidationConfig {
    fn default() -> Self {
        Self {
            strategy: CrossValidationStrategy::KFold(5),
            shuffle: true,
            random_seed: None,
            batch_size: 32,
            num_workers: 0,
            metrics: vec![MetricType::Loss, MetricType::Accuracy],
            verbose: 1,
        }
    }
}

/// Cross-validation fold
///
/// A single train/validation split, expressed as disjoint index sets into the
/// dataset the fold was created from.
#[derive(Debug)]
pub struct CrossValidationFold {
    /// Indices of the samples used for training in this fold
    pub train_indices: Vec<usize>,
    /// Indices of the samples used for validation in this fold
    pub val_indices: Vec<usize>,
}

/// Cross-validator for model evaluation
///
/// Pairs a [`CrossValidationConfig`] with an internal [`Evaluator`]; folds
/// are produced by `create_folds` and per-fold metrics collected by
/// `cross_validate`.
pub struct CrossValidator<
    F: Float + Debug + ScalarOperand + FromPrimitive + NumAssign + std::fmt::Display + Send + Sync,
> {
    /// Configuration for cross-validation
    pub config: CrossValidationConfig,
    /// Evaluator used to compute validation metrics on each fold
    evaluator: Evaluator<F>,
}

/// Simple in-memory dataset subset
///
/// Materializes the `(input, target)` pairs at a chosen set of indices of a
/// parent dataset, so each fold can be used as a standalone `Dataset`.
struct DatasetSubset<
    F: Float + Debug + ScalarOperand + FromPrimitive + NumAssign + Send + Sync + 'static,
> {
    // Eagerly copied samples; order follows the indices passed to `new`.
    data: Vec<(
        scirs2_core::ndarray::Array<F, scirs2_core::ndarray::IxDyn>,
        scirs2_core::ndarray::Array<F, scirs2_core::ndarray::IxDyn>,
    )>,
}

impl<F: Float + Debug + ScalarOperand + FromPrimitive + NumAssign + Send + Sync + 'static>
    DatasetSubset<F>
{
    /// Materialize the samples at `indices` from `dataset` into memory.
    ///
    /// Short-circuits with the first error returned by `dataset.get`.
    fn new(dataset: &dyn Dataset<F>, indices: &[usize]) -> Result<Self> {
        let data = indices
            .iter()
            .map(|&sample_idx| dataset.get(sample_idx))
            .collect::<Result<Vec<_>>>()?;
        Ok(Self { data })
    }
}

impl<F: Float + Debug + ScalarOperand + FromPrimitive + NumAssign + Send + Sync + 'static>
    Dataset<F> for DatasetSubset<F>
{
    fn len(&self) -> usize {
        self.data.len()
    }

    fn get(
        &self,
        idx: usize,
    ) -> Result<(
        scirs2_core::ndarray::Array<F, scirs2_core::ndarray::IxDyn>,
        scirs2_core::ndarray::Array<F, scirs2_core::ndarray::IxDyn>,
    )> {
        if idx >= self.data.len() {
            return Err(crate::error::NeuralError::InferenceError(format!(
                "Index out of bounds: {} >= {}",
                idx,
                self.data.len()
            )));
        }
        Ok((self.data[idx].0.clone(), self.data[idx].1.clone()))
    }

    fn box_clone(&self) -> Box<dyn Dataset<F> + Send + Sync> {
        let cloned_data = self.data.clone();
        Box::new(Self { data: cloned_data })
    }
}

impl<
        F: Float + Debug + ScalarOperand + FromPrimitive + NumAssign + std::fmt::Display + Send + Sync,
    > CrossValidator<F>
{
    /// Create a new cross-validator from `config`.
    ///
    /// An internal [`Evaluator`] is built with shuffling disabled, because
    /// any requested shuffling is applied during fold creation instead.
    ///
    /// # Errors
    ///
    /// Propagates any error from [`Evaluator::new`].
    pub fn new(config: CrossValidationConfig) -> Result<Self> {
        let eval_config = EvaluationConfig {
            batch_size: config.batch_size,
            shuffle: false, // We handle shuffling during fold creation
            num_workers: config.num_workers,
            metrics: config.metrics.clone(),
            steps: None,
            verbose: config.verbose,
        };
        let evaluator = Evaluator::new(eval_config)?;
        Ok(Self { config, evaluator })
    }

    /// Generate cross-validation folds for `dataset` using the configured
    /// strategy.
    ///
    /// # Errors
    ///
    /// Returns a `ValidationError` when the strategy parameters are
    /// incompatible with the dataset size (`k < 2`, fewer samples than
    /// folds, `p == 0` or `p >= len`, or an out-of-range `test_size`).
    pub fn create_folds(&self, dataset: &dyn Dataset<F>) -> Result<Vec<CrossValidationFold>> {
        let n_samples = dataset.len();
        match self.config.strategy {
            CrossValidationStrategy::KFold(k) => {
                if k < 2 {
                    return Err(NeuralError::ValidationError(
                        "k must be at least 2".to_string(),
                    ));
                }
                if n_samples < k {
                    return Err(NeuralError::ValidationError(format!(
                        "Dataset size ({}) must be at least equal to k ({})",
                        n_samples, k
                    )));
                }
                // Create (optionally shuffled) sample indices.
                let mut indices: Vec<usize> = (0..n_samples).collect();
                if self.config.shuffle {
                    use scirs2_core::random::seq::SliceRandom;
                    if let Some(seed) = self.config.random_seed {
                        let mut rng = scirs2_core::random::rngs::StdRng::seed_from_u64(seed);
                        indices.shuffle(&mut rng);
                    } else {
                        let mut rng = scirs2_core::random::rng();
                        indices.shuffle(&mut rng);
                    }
                }
                // The first `remainder` folds get one extra sample so all
                // n_samples are covered.
                let fold_size = n_samples / k;
                let remainder = n_samples % k;
                let mut folds = Vec::with_capacity(k);
                let mut start = 0;
                for i in 0..k {
                    let fold_size_adjusted = if i < remainder {
                        fold_size + 1
                    } else {
                        fold_size
                    };
                    let end = start + fold_size_adjusted;
                    // Validation = this contiguous slice; training = the rest.
                    let val_indices = indices[start..end].to_vec();
                    let mut train_indices = Vec::with_capacity(n_samples - val_indices.len());
                    train_indices.extend_from_slice(&indices[0..start]);
                    train_indices.extend_from_slice(&indices[end..]);
                    folds.push(CrossValidationFold {
                        train_indices,
                        val_indices,
                    });
                    start = end;
                }
                Ok(folds)
            }
            CrossValidationStrategy::StratifiedKFold(k) => {
                // Validate parameters: previously k == 0 reached `i % k`
                // below and panicked with a divide-by-zero. Checks mirror
                // the KFold branch for consistency.
                if k < 2 {
                    return Err(NeuralError::ValidationError(
                        "k must be at least 2".to_string(),
                    ));
                }
                if n_samples < k {
                    return Err(NeuralError::ValidationError(format!(
                        "Dataset size ({}) must be at least equal to k ({})",
                        n_samples, k
                    )));
                }
                // Group sample indices by class label.
                let mut class_indices: HashMap<usize, Vec<usize>> = HashMap::new();
                for i in 0..n_samples {
                    let (_, target) = dataset.get(i)?;
                    let class_idx = if target.ndim() > 1 && target.shape()[1] > 1 {
                        // One-hot encoded: class is the argmax along axis 1.
                        let mut max_idx = 0;
                        let mut max_val = target[[0, 0]];
                        for j in 1..target.shape()[1] {
                            if target[[0, j]] > max_val {
                                max_idx = j;
                                max_val = target[[0, j]];
                            }
                        }
                        max_idx
                    } else {
                        // Direct class index; non-convertible values fall back to 0.
                        target[[0]].to_usize().unwrap_or(0)
                    };
                    class_indices.entry(class_idx).or_default().push(i);
                }
                // Create empty folds, then fill them per class.
                let mut folds: Vec<CrossValidationFold> = (0..k)
                    .map(|_| CrossValidationFold {
                        train_indices: Vec::new(),
                        val_indices: Vec::new(),
                    })
                    .collect();
                // One RNG reused across classes: reseeding with the same
                // seed per class would apply the same permutation pattern
                // to every class.
                use scirs2_core::random::seq::SliceRandom;
                let mut seeded_rng = self
                    .config
                    .random_seed
                    .map(scirs2_core::random::rngs::StdRng::seed_from_u64);
                for (_, mut indices) in class_indices {
                    if self.config.shuffle {
                        if let Some(rng) = seeded_rng.as_mut() {
                            indices.shuffle(rng);
                        } else {
                            let mut rng = scirs2_core::random::rng();
                            indices.shuffle(&mut rng);
                        }
                    }
                    // Round-robin distribution keeps per-fold class
                    // proportions close to the dataset's.
                    for (i, &idx) in indices.iter().enumerate() {
                        folds[i % k].val_indices.push(idx);
                    }
                }
                // Training set of each fold = complement of its validation set.
                for fold in &mut folds {
                    let val_set: std::collections::HashSet<usize> =
                        fold.val_indices.iter().cloned().collect();
                    fold.train_indices =
                        (0..n_samples).filter(|i| !val_set.contains(i)).collect();
                }
                Ok(folds)
            }
            CrossValidationStrategy::LeaveOneOut => {
                // One fold per sample; sample i alone is the validation set.
                let mut folds = Vec::with_capacity(n_samples);
                for i in 0..n_samples {
                    let val_indices = vec![i];
                    let train_indices: Vec<usize> =
                        (0..n_samples).filter(|&j| j != i).collect();
                    folds.push(CrossValidationFold {
                        train_indices,
                        val_indices,
                    });
                }
                Ok(folds)
            }
            CrossValidationStrategy::LeavePOut(p) => {
                // Validate parameters: previously p == 0 reached the
                // `n_samples / p` below and panicked with a divide-by-zero.
                if p == 0 {
                    return Err(NeuralError::ValidationError(
                        "p must be at least 1".to_string(),
                    ));
                }
                if p >= n_samples {
                    return Err(NeuralError::ValidationError(format!(
                        "p ({}) must be less than dataset size ({})",
                        p, n_samples
                    )));
                }
                // Simplified variant: n/p contiguous non-overlapping folds of
                // p samples each; any remainder samples are only ever used
                // for training, never validation.
                let indices: Vec<usize> = (0..n_samples).collect();
                let n_folds = n_samples / p;
                let mut folds = Vec::with_capacity(n_folds);
                for i in 0..n_folds {
                    let start = i * p;
                    let end = ((i + 1) * p).min(n_samples);
                    let val_indices = indices[start..end].to_vec();
                    let mut train_indices = Vec::with_capacity(n_samples - p);
                    train_indices.extend_from_slice(&indices[..start]);
                    train_indices.extend_from_slice(&indices[end..]);
                    folds.push(CrossValidationFold {
                        train_indices,
                        val_indices,
                    });
                }
                Ok(folds)
            }
            CrossValidationStrategy::ShuffleSplit(n_splits, test_size) => {
                if test_size <= 0.0 || test_size >= 1.0 {
                    return Err(NeuralError::ValidationError(
                        "test_size must be between 0 and 1".to_string(),
                    ));
                }
                let indices: Vec<usize> = (0..n_samples).collect();
                // Validation set size, rounded up.
                let test_count = (n_samples as f64 * test_size).ceil() as usize;
                if test_count >= n_samples {
                    return Err(NeuralError::ValidationError(
                        "test_size too large for dataset".to_string(),
                    ));
                }
                // One RNG reused across splits: previously the RNG was
                // reseeded with the same seed on every iteration, so all
                // n_splits folds came out identical.
                use scirs2_core::random::seq::SliceRandom;
                let mut seeded_rng = self
                    .config
                    .random_seed
                    .map(scirs2_core::random::rngs::StdRng::seed_from_u64);
                let mut folds = Vec::with_capacity(n_splits);
                for _ in 0..n_splits {
                    let mut shuffled = indices.clone();
                    if let Some(rng) = seeded_rng.as_mut() {
                        shuffled.shuffle(rng);
                    } else {
                        let mut rng = scirs2_core::random::rng();
                        shuffled.shuffle(&mut rng);
                    }
                    // First test_count shuffled indices validate; rest train.
                    let val_indices = shuffled[0..test_count].to_vec();
                    let train_indices = shuffled[test_count..].to_vec();
                    folds.push(CrossValidationFold {
                        train_indices,
                        val_indices,
                    });
                }
                Ok(folds)
            }
        }
    }

    /// Perform cross-validation on a model builder and dataset.
    ///
    /// For every fold, a fresh model is built and evaluated on the fold's
    /// validation subset; results are returned as a map from metric name to
    /// per-fold metric values.
    ///
    /// NOTE(review): the training subset is materialized but not used —
    /// models are evaluated as built, without training on the fold. Confirm
    /// whether training is expected to happen here or upstream.
    ///
    /// # Errors
    ///
    /// Propagates errors from fold creation, dataset access, model
    /// construction, and evaluation.
    pub fn cross_validate<L: Layer<F> + Clone>(
        &mut self,
        model_builder: &dyn ModelBuilder<F, Model = L>,
        dataset: &dyn Dataset<F>,
        loss_fn: Option<&dyn crate::losses::Loss<F>>,
    ) -> Result<HashMap<String, Vec<F>>> {
        let folds = self.create_folds(dataset)?;
        // Pre-create one result vector per configured metric. Names must
        // match those produced by the evaluator, or the fold values are
        // silently dropped in the collection loop below.
        let metrics = &self.config.metrics;
        let mut results: HashMap<String, Vec<F>> = metrics
            .iter()
            .map(|m| {
                let name = match m {
                    MetricType::Loss => "loss".to_string(),
                    MetricType::Accuracy => "accuracy".to_string(),
                    MetricType::Precision => "precision".to_string(),
                    MetricType::Recall => "recall".to_string(),
                    MetricType::F1Score => "f1_score".to_string(),
                    MetricType::MeanSquaredError => "mse".to_string(),
                    MetricType::MeanAbsoluteError => "mae".to_string(),
                    MetricType::RSquared => "r2".to_string(),
                    MetricType::AUC => "auc".to_string(),
                    MetricType::Custom(name) => name.clone(),
                };
                (name, Vec::with_capacity(folds.len()))
            })
            .collect();
        for (fold_idx, fold) in folds.iter().enumerate() {
            if self.config.verbose > 0 {
                println!("Fold {}/{}", fold_idx + 1, folds.len());
            }
            let _train_dataset = DatasetSubset::new(dataset, &fold.train_indices)?;
            let val_dataset = DatasetSubset::new(dataset, &fold.val_indices)?;
            // Build a fresh model for this fold.
            let model = model_builder.build()?;
            // Evaluate on the fold's validation subset.
            let fold_metrics = self.evaluator.evaluate(&model, &val_dataset, loss_fn)?;
            for (name, value) in fold_metrics {
                if let Some(values) = results.get_mut(&name) {
                    values.push(value);
                }
            }
        }
        // Verbose summary: mean ± population standard deviation per metric.
        if self.config.verbose > 0 {
            for (name, values) in &results {
                if !values.is_empty() {
                    let n = F::from(values.len()).expect("Operation failed");
                    let sum = values.iter().fold(F::zero(), |acc, &x| acc + x);
                    let mean = sum / n;
                    let variance_sum = values
                        .iter()
                        .fold(F::zero(), |acc, &x| acc + (x - mean) * (x - mean));
                    let std = (variance_sum / n).sqrt();
                    println!("{}: {:.4} ± {:.4}", name, mean, std);
                }
            }
        }
        Ok(results)
    }
}