use super::config::*;
use crate::error::{MLError, Result};
use crate::qnn::{QNNLayerType, QuantumNeuralNetwork};
use scirs2_core::ndarray::{s, Array1, Array2};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::f64::consts::PI;

/// Quantum feature extractor that turns raw input data into an enriched feature matrix.
#[derive(Debug, Clone)]
pub struct QuantumFeatureExtractor {
    /// Feature-engineering configuration.
    config: FeatureEngineeringConfig,

    /// Parameterized circuits used for quantum feature generation.
    feature_circuits: Vec<Vec<f64>>,

    /// Quantum neural network applied as a learned feature transformation.
    transform_network: QuantumNeuralNetwork,

    /// Optional quantum Fourier feature generator.
    fourier_generator: Option<QuantumFourierFeatures>,

    /// Optional quantum wavelet transformer.
    wavelet_transformer: Option<QuantumWaveletTransform>,

    /// Statistics computed over the extracted features.
    feature_stats: FeatureStatistics,
}

/// Quantum Fourier feature generator.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuantumFourierFeatures {
    /// Number of Fourier components to generate.
    num_components: usize,

    /// Frequency ranges (min, max) to cover.
    frequency_ranges: Vec<(f64, f64)>,

    /// Parameters of the quantum Fourier transform circuit.
    qft_circuit: Vec<f64>,

    /// Frequencies learned from the data.
    learned_frequencies: Array1<f64>,

    /// Phase relationships between components.
    phase_relationships: Array2<f64>,
}

/// Quantum wavelet transform for multi-level feature decomposition.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuantumWaveletTransform {
    /// Wavelet family used for the decomposition.
    wavelet_type: WaveletType,

    /// Number of decomposition levels.
    num_levels: usize,

    /// Circuit parameters, one set per level.
    wavelet_circuits: Vec<Vec<f64>>,

    /// Threshold below which coefficients are zeroed.
    threshold: f64,

    /// Stored wavelet coefficients.
    coefficients: Vec<Array2<f64>>,
}

/// Summary statistics of the extracted features.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FeatureStatistics {
    /// Per-feature means.
    pub means: Array1<f64>,

    /// Per-feature standard deviations.
    pub stds: Array1<f64>,

    /// Per-feature ranges (max - min).
    pub ranges: Array1<f64>,

    /// Pairwise feature correlations.
    pub correlations: Array2<f64>,

    /// Entropy-based entanglement measure per feature.
    pub entanglement_measures: Array1<f64>,
}

/// Generator for lagged copies of feature columns.
#[derive(Debug, Clone)]
pub struct LagFeatureGenerator {
    lag_periods: Vec<usize>,
    feature_names: Vec<String>,
}

/// Calculator for rolling-window statistics.
#[derive(Debug, Clone)]
pub struct RollingStatsCalculator {
    window_sizes: Vec<usize>,
    stats_types: Vec<StatType>,
}

/// Rolling statistics that can be computed over a window.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum StatType {
    Mean,
    Std,
    Min,
    Max,
    Median,
    Quantile(f64),
    Skewness,
    Kurtosis,
}

/// Generator for pairwise feature interactions.
#[derive(Debug, Clone)]
pub struct InteractionFeatureGenerator {
    max_interaction_order: usize,
    interaction_types: Vec<InteractionType>,
}

/// Supported feature interaction types.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum InteractionType {
    Multiplication,
    Division,
    Addition,
    Subtraction,
    QuantumEntanglement,
}

impl QuantumFeatureExtractor {
    /// Creates a new feature extractor for the given configuration and qubit count.
    pub fn new(config: FeatureEngineeringConfig, num_qubits: usize) -> Result<Self> {
        // Build a small set of parameterized feature circuits.
        let mut feature_circuits = Vec::new();

        for circuit_idx in 0..5 {
            let mut circuit_params = Vec::new();

            // Single-qubit rotation parameters.
            for _qubit_idx in 0..num_qubits {
                circuit_params.push(1.0);
                circuit_params.push(PI * circuit_idx as f64 / 5.0);
            }

            // Entangling parameters between neighboring qubits.
            for qubit_idx in 0..num_qubits.saturating_sub(1) {
                circuit_params.push(2.0);
                circuit_params.push(PI / 4.0 * qubit_idx as f64);
            }

            feature_circuits.push(circuit_params);
        }

        // Quantum neural network used as a learned feature transformation.
        let layers = vec![
            QNNLayerType::EncodingLayer { num_features: 100 },
            QNNLayerType::VariationalLayer { num_params: 50 },
            QNNLayerType::MeasurementLayer {
                measurement_basis: "computational".to_string(),
            },
        ];

        let transform_network = QuantumNeuralNetwork::new(layers, num_qubits, 100, 50)?;

        let fourier_generator = if config.quantum_fourier_features {
            Some(QuantumFourierFeatures::new(
                20,
                vec![(0.1, 10.0), (10.0, 100.0)],
                num_qubits,
            )?)
        } else {
            None
        };

        let wavelet_transformer = if config.wavelet_decomposition {
            Some(QuantumWaveletTransform::new(
                WaveletType::Daubechies(4),
                3,
                num_qubits,
            )?)
        } else {
            None
        };

        let feature_stats = FeatureStatistics::new();

        Ok(Self {
            config,
            feature_circuits,
            transform_network,
            fourier_generator,
            wavelet_transformer,
            feature_stats,
        })
    }

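    /// Runs the full feature-engineering pipeline: lag features, rolling
    /// statistics, optional Fourier and wavelet features, interaction terms,
    /// a quantum transformation, and z-score normalization.
    ///
    /// A minimal usage sketch (marked `ignore`, so it is not compiled as a
    /// doctest); it assumes `FeatureEngineeringConfig` can be built via
    /// `Default`, which is not defined in this module:
    ///
    /// ```ignore
    /// use scirs2_core::ndarray::Array2;
    ///
    /// let config = FeatureEngineeringConfig::default(); // assumed constructor
    /// let extractor = QuantumFeatureExtractor::new(config, 4)?;
    ///
    /// // 100 samples x 3 raw features in, enriched feature matrix out.
    /// let data = Array2::from_shape_fn((100, 3), |(i, j)| (i + j) as f64);
    /// let features = extractor.extract_features(&data)?;
    /// assert!(features.ncols() >= data.ncols());
    /// ```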
    pub fn extract_features(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        let mut features = data.clone();

        // Lag features.
        features = self.add_lag_features(&features)?;

        // Rolling-window statistics.
        features = self.add_rolling_features(&features)?;

        // Optional quantum Fourier features.
        if let Some(ref fourier_gen) = self.fourier_generator {
            features = fourier_gen.transform(&features)?;
        }

        // Optional wavelet decomposition.
        if let Some(ref wavelet_trans) = self.wavelet_transformer {
            features = wavelet_trans.decompose(&features)?;
        }

        // Optional pairwise interaction features.
        if self.config.interaction_features {
            features = self.add_interaction_features(&features)?;
        }

        // Quantum transformation via the QNN.
        features = self.apply_quantum_transformation(&features)?;

        // Z-score normalization.
        features = self.normalize_features(&features)?;

        Ok(features)
    }

    /// Appends lagged copies of each feature column for the configured lag periods.
    fn add_lag_features(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        if self.config.lag_features.is_empty() {
            return Ok(data.clone());
        }

        let (n_samples, n_features) = data.dim();
        let total_lag_features = self.config.lag_features.len() * n_features;
        let mut enhanced_data = Array2::zeros((n_samples, n_features + total_lag_features));

        // Copy the original features.
        enhanced_data.slice_mut(s![.., 0..n_features]).assign(data);

        // Fill lagged columns; the first `lag` rows stay zero.
        let mut feature_offset = n_features;
        for &lag in &self.config.lag_features {
            for feature_idx in 0..n_features {
                for sample_idx in lag..n_samples {
                    enhanced_data[[sample_idx, feature_offset]] =
                        data[[sample_idx - lag, feature_idx]];
                }
                feature_offset += 1;
            }
        }

        Ok(enhanced_data)
    }

    /// Appends rolling-window statistics (mean, std, max) for each feature and window size.
    fn add_rolling_features(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        if self.config.rolling_windows.is_empty() {
            return Ok(data.clone());
        }

        let (n_samples, n_features) = data.dim();
        let stats_per_window = 3; // mean, std, max
        let total_rolling_features =
            self.config.rolling_windows.len() * n_features * stats_per_window;
        let mut enhanced_data = Array2::zeros((n_samples, n_features + total_rolling_features));

        // Copy the original features.
        enhanced_data.slice_mut(s![.., 0..n_features]).assign(data);

        let mut feature_offset = n_features;
        for &window_size in &self.config.rolling_windows {
            for feature_idx in 0..n_features {
                for sample_idx in window_size..n_samples {
                    let window_start = sample_idx.saturating_sub(window_size);
                    let window_data = data.slice(s![window_start..sample_idx, feature_idx]);

                    // Rolling mean.
                    enhanced_data[[sample_idx, feature_offset]] = window_data.mean().unwrap_or(0.0);

                    // Rolling standard deviation.
                    enhanced_data[[sample_idx, feature_offset + 1]] = window_data.std(1.0);

                    // Rolling maximum.
                    enhanced_data[[sample_idx, feature_offset + 2]] =
                        window_data.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
                }
                feature_offset += stats_per_window;
            }
        }

        Ok(enhanced_data)
    }

    /// Appends pairwise multiplicative interaction features.
    fn add_interaction_features(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        let (n_samples, n_features) = data.dim();

        if n_features < 2 {
            return Ok(data.clone());
        }

        // One interaction column per unordered feature pair.
        let n_interactions = n_features * (n_features - 1) / 2;
        let mut enhanced_data = Array2::zeros((n_samples, n_features + n_interactions));

        enhanced_data.slice_mut(s![.., 0..n_features]).assign(data);

        let mut interaction_idx = n_features;
        for i in 0..n_features {
            for j in (i + 1)..n_features {
                for sample_idx in 0..n_samples {
                    enhanced_data[[sample_idx, interaction_idx]] =
                        data[[sample_idx, i]] * data[[sample_idx, j]];
                }
                interaction_idx += 1;
            }
        }

        Ok(enhanced_data)
    }

    /// Passes each sample through the quantum network and appends the outputs to the input features.
    fn apply_quantum_transformation(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        if !self.config.quantum_features {
            return Ok(data.clone());
        }

        let mut quantum_features = Array2::zeros((data.nrows(), self.transform_network.output_dim));

        for (i, row) in data.rows().into_iter().enumerate() {
            let row_vec = row.to_owned();
            let transformed = self.transform_network.forward(&row_vec)?;
            quantum_features.row_mut(i).assign(&transformed);
        }

        // Concatenate original and quantum-transformed features.
        let (n_samples, n_features) = data.dim();
        let mut combined_features =
            Array2::zeros((n_samples, n_features + quantum_features.ncols()));

        combined_features
            .slice_mut(s![.., 0..n_features])
            .assign(data);
        combined_features
            .slice_mut(s![.., n_features..])
            .assign(&quantum_features);

        Ok(combined_features)
    }

    /// Z-score normalizes every feature column.
    fn normalize_features(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        let mut normalized = data.clone();

        for j in 0..data.ncols() {
            let column = data.column(j);
            let mean = column.mean().unwrap_or(0.0);
            let std = column.std(1.0).max(1e-8); // guard against zero variance

            for i in 0..data.nrows() {
                normalized[[i, j]] = (data[[i, j]] - mean) / std;
            }
        }

        Ok(normalized)
    }

    /// Fits feature statistics on the given data.
    pub fn fit_statistics(&mut self, data: &Array2<f64>) -> Result<()> {
        self.feature_stats.compute_statistics(data)?;
        Ok(())
    }

    /// Returns per-feature importance scores based on the entanglement measures.
    pub fn get_feature_importance(&self) -> Result<Array1<f64>> {
        Ok(self.feature_stats.entanglement_measures.clone())
    }
}

impl QuantumFourierFeatures {
    /// Creates a new Fourier feature generator.
    pub fn new(
        num_components: usize,
        frequency_ranges: Vec<(f64, f64)>,
        num_qubits: usize,
    ) -> Result<Self> {
        let mut qft_circuit = Vec::new();

        // Hadamard-style parameter per qubit.
        for _qubit_idx in 0..num_qubits {
            qft_circuit.push(1.0);
        }

        // Controlled phase rotations between qubit pairs.
        for i in 0..num_qubits {
            for j in (i + 1)..num_qubits {
                let phase = PI / 2_f64.powi((j - i) as i32);
                qft_circuit.push(phase);
            }
        }

        // Initial frequency grid; refined later by `learn_frequencies`.
        let learned_frequencies = Array1::from_shape_fn(num_components, |i| 0.1 + i as f64 * 0.1);

        let phase_relationships = Array2::zeros((num_components, num_components));

        Ok(Self {
            num_components,
            frequency_ranges,
            qft_circuit,
            learned_frequencies,
            phase_relationships,
        })
    }

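    /// Appends quantum-enhanced Fourier features (a sine/cosine pair per
    /// learned frequency) to the input matrix.
    ///
    /// A small sketch of direct use (marked `ignore`; in the pipeline this is
    /// normally called through `QuantumFeatureExtractor`):
    ///
    /// ```ignore
    /// use scirs2_core::ndarray::Array2;
    ///
    /// let fourier = QuantumFourierFeatures::new(8, vec![(0.1, 10.0)], 4)?;
    /// let data = Array2::from_shape_fn((64, 2), |(i, _)| (i as f64 * 0.1).sin());
    /// let enriched = fourier.transform(&data)?;
    /// assert_eq!(enriched.ncols(), 2 + 2 * 8); // original columns + sin/cos per component
    /// ```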
    pub fn transform(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        let (n_samples, n_features) = data.dim();
        let fourier_features_count = self.num_components * 2; // sin and cos per component
        let mut fourier_features = Array2::zeros((n_samples, n_features + fourier_features_count));

        fourier_features
            .slice_mut(s![.., 0..n_features])
            .assign(data);

        for i in 0..n_samples {
            for (j, &freq) in self.learned_frequencies.iter().enumerate() {
                let phase = i as f64 * freq * 2.0 * PI / n_samples as f64;

                // Quantum-enhanced phase for this component.
                let quantum_phase = self.apply_quantum_phase_enhancement(phase, j)?;

                fourier_features[[i, n_features + 2 * j]] = quantum_phase.sin();
                fourier_features[[i, n_features + 2 * j + 1]] = quantum_phase.cos();
            }
        }

        Ok(fourier_features)
    }

    /// Modulates a classical phase using the stored circuit parameters.
    fn apply_quantum_phase_enhancement(&self, phase: f64, component_idx: usize) -> Result<f64> {
        let mut enhanced_phase = phase;

        if component_idx < self.qft_circuit.len() {
            let circuit_param = self.qft_circuit[component_idx % self.qft_circuit.len()];
            enhanced_phase = phase * circuit_param + 0.1 * (phase * circuit_param).sin();
        }

        Ok(enhanced_phase)
    }

    /// Estimates a dominant frequency per component from the data columns.
    pub fn learn_frequencies(&mut self, data: &Array2<f64>) -> Result<()> {
        for i in 0..self.num_components.min(data.ncols()) {
            let column = data.column(i % data.ncols());
            let estimated_freq = self.estimate_dominant_frequency(&column)?;
            self.learned_frequencies[i] = estimated_freq;
        }

        Ok(())
    }

    /// Estimates the dominant frequency of a signal with a simple cosine projection.
    fn estimate_dominant_frequency(
        &self,
        signal: &scirs2_core::ndarray::ArrayView1<f64>,
    ) -> Result<f64> {
        let n = signal.len();
        let mut max_power = 0.0;
        let mut dominant_freq = 0.1;

        for k in 1..n / 2 {
            let freq = k as f64 / n as f64;
            let mut power = 0.0;

            for (i, &value) in signal.iter().enumerate() {
                power += value * (2.0 * PI * freq * i as f64).cos();
            }

            if power.abs() > max_power {
                max_power = power.abs();
                dominant_freq = freq;
            }
        }

        Ok(dominant_freq)
    }
}

impl QuantumWaveletTransform {
    /// Creates a new wavelet transform with the given type and number of levels.
    pub fn new(wavelet_type: WaveletType, num_levels: usize, num_qubits: usize) -> Result<Self> {
        let mut wavelet_circuits = Vec::new();

        for level in 0..num_levels {
            let mut circuit_params = Vec::new();

            // Rotation parameters for this level.
            for _qubit_idx in 0..num_qubits / 2 {
                circuit_params.push(1.0);
                circuit_params.push(PI / 4.0 * (level + 1) as f64);
            }

            // Level-dependent scaling parameters.
            for _qubit_idx in 0..num_qubits / 2 {
                circuit_params.push(2.0_f64.powi(-(level as i32)));
            }

            wavelet_circuits.push(circuit_params);
        }

        Ok(Self {
            wavelet_type,
            num_levels,
            wavelet_circuits,
            threshold: 0.1,
            coefficients: Vec::new(),
        })
    }

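    /// Applies all wavelet levels to the data and thresholds small coefficients to zero.
    ///
    /// A usage sketch (marked `ignore`); `WaveletType` is assumed to be
    /// re-exported from the config module via `use super::config::*`:
    ///
    /// ```ignore
    /// use scirs2_core::ndarray::Array2;
    ///
    /// let wavelet = QuantumWaveletTransform::new(WaveletType::Haar, 2, 4)?;
    /// let data = Array2::from_shape_fn((32, 2), |(i, _)| (i as f64 * 0.2).cos());
    /// let decomposed = wavelet.decompose(&data)?;
    /// assert_eq!(decomposed.dim(), data.dim()); // same shape, thresholded values
    /// ```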
    pub fn decompose(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        let mut decomposed = data.clone();

        for level in 0..self.num_levels {
            decomposed = self.apply_wavelet_level(&decomposed, level)?;
        }

        // Zero out coefficients below the threshold.
        self.apply_threshold(&mut decomposed);

        Ok(decomposed)
    }

    fn apply_wavelet_level(&self, data: &Array2<f64>, level: usize) -> Result<Array2<f64>> {
        if level >= self.wavelet_circuits.len() {
            return Ok(data.clone());
        }

        let circuit = &self.wavelet_circuits[level];
        let mut result = data.clone();

        for i in 0..data.nrows() {
            for j in 0..data.ncols() {
                let mut value = data[[i, j]];

                for &param in circuit.iter() {
                    let scale = 2.0_f64.powi(-(level as i32));
                    let wavelet_value = self.wavelet_function(value * scale, param)?;
                    value = value * 0.7 + wavelet_value * 0.3; // blend input with wavelet response
                }

                result[[i, j]] = value;
            }
        }

        Ok(result)
    }

    fn wavelet_function(&self, x: f64, quantum_param: f64) -> Result<f64> {
        match self.wavelet_type {
            WaveletType::Haar => {
                let classical_haar = if x >= 0.0 && x < 0.5 {
                    1.0
                } else if x >= 0.5 && x < 1.0 {
                    -1.0
                } else {
                    0.0
                };

                let quantum_enhancement = (quantum_param * x).sin() * 0.1;
                Ok(classical_haar + quantum_enhancement)
            }
            WaveletType::Daubechies(_) => {
                let classical = (PI * x).sin() * (-x * x / 2.0).exp();
                let quantum_enhancement = (quantum_param * x * PI).cos() * 0.05;
                Ok(classical + quantum_enhancement)
            }
            WaveletType::Quantum => {
                let quantum_phase = quantum_param * x * PI;
                Ok(quantum_phase.sin() * (-x * x).exp())
            }
            _ => Ok((PI * x).sin() * (-x * x / 2.0).exp()),
        }
    }

    fn apply_threshold(&self, data: &mut Array2<f64>) {
        for value in data.iter_mut() {
            if value.abs() < self.threshold {
                *value = 0.0;
            }
        }
    }
}

impl FeatureStatistics {
    pub fn new() -> Self {
        Self {
            means: Array1::zeros(0),
            stds: Array1::zeros(0),
            ranges: Array1::zeros(0),
            correlations: Array2::zeros((0, 0)),
            entanglement_measures: Array1::zeros(0),
        }
    }

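    /// Computes per-feature means, standard deviations, ranges, pairwise
    /// correlations, and an entropy-based entanglement proxy.
    ///
    /// A usage sketch (marked `ignore`):
    ///
    /// ```ignore
    /// use scirs2_core::ndarray::Array2;
    ///
    /// let mut stats = FeatureStatistics::new();
    /// let data = Array2::from_shape_fn((50, 3), |(i, j)| (i * (j + 1)) as f64);
    /// stats.compute_statistics(&data)?;
    /// assert_eq!(stats.means.len(), 3);
    /// assert_eq!(stats.correlations.dim(), (3, 3));
    /// ```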
    pub fn compute_statistics(&mut self, data: &Array2<f64>) -> Result<()> {
        let (_n_samples, n_features) = data.dim();

        // Per-feature mean, standard deviation, and range.
        self.means = Array1::zeros(n_features);
        self.stds = Array1::zeros(n_features);
        self.ranges = Array1::zeros(n_features);

        for j in 0..n_features {
            let column = data.column(j);
            self.means[j] = column.mean().unwrap_or(0.0);
            self.stds[j] = column.std(1.0);

            let min_val = column.iter().fold(f64::INFINITY, |a, &b| a.min(b));
            let max_val = column.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
            self.ranges[j] = max_val - min_val;
        }

        // Pairwise correlations.
        self.correlations = Array2::zeros((n_features, n_features));
        for i in 0..n_features {
            for j in 0..n_features {
                let corr = self.compute_correlation(data, i, j)?;
                self.correlations[[i, j]] = corr;
            }
        }

        // Entropy-based entanglement measure per feature.
        self.entanglement_measures = Array1::zeros(n_features);
        for j in 0..n_features {
            let entanglement = self.compute_quantum_entanglement(data, j)?;
            self.entanglement_measures[j] = entanglement;
        }

        Ok(())
    }

    /// Pearson correlation between feature columns `i` and `j`.
    fn compute_correlation(&self, data: &Array2<f64>, i: usize, j: usize) -> Result<f64> {
        let col_i = data.column(i);
        let col_j = data.column(j);

        let mean_i = col_i.mean().unwrap_or(0.0);
        let mean_j = col_j.mean().unwrap_or(0.0);

        let mut numerator = 0.0;
        let mut sum_sq_i = 0.0;
        let mut sum_sq_j = 0.0;

        for (val_i, val_j) in col_i.iter().zip(col_j.iter()) {
            let dev_i = val_i - mean_i;
            let dev_j = val_j - mean_j;

            numerator += dev_i * dev_j;
            sum_sq_i += dev_i * dev_i;
            sum_sq_j += dev_j * dev_j;
        }

        let denominator = (sum_sq_i * sum_sq_j).sqrt();
        if denominator < 1e-10 {
            Ok(0.0)
        } else {
            Ok(numerator / denominator)
        }
    }

    /// Entropy-based proxy for the "quantum entanglement" of a feature: the
    /// normalized Shannon entropy of its binned value distribution.
    fn compute_quantum_entanglement(&self, data: &Array2<f64>, feature_idx: usize) -> Result<f64> {
        let column = data.column(feature_idx);

        // Histogram-based entropy estimate.
        let mut entropy = 0.0;
        let n_bins = 10;
        let min_val = column.iter().fold(f64::INFINITY, |a, &b| a.min(b));
        let max_val = column.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
        let range = max_val - min_val;

        if range > 1e-10 {
            let mut bin_counts = vec![0; n_bins];

            for &value in column.iter() {
                let bin_idx = ((value - min_val) / range * (n_bins - 1) as f64) as usize;
                let bin_idx = bin_idx.min(n_bins - 1);
                bin_counts[bin_idx] += 1;
            }

            let n_total = column.len() as f64;
            for &count in &bin_counts {
                if count > 0 {
                    let prob = count as f64 / n_total;
                    entropy -= prob * prob.ln();
                }
            }
        }

        Ok(entropy / n_bins as f64) // normalize by the number of bins
    }
}

/// Quantum-inspired feature selector.
pub struct QuantumFeatureSelector {
    selection_method: FeatureSelectionMethod,
    max_features: Option<usize>,
}

/// Available feature selection strategies.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum FeatureSelectionMethod {
    QuantumMutualInformation,
    QuantumEntanglement,
    VariationalImportance,
    HybridSelection,
}

impl QuantumFeatureSelector {
    /// Creates a selector with the given method and optional cap on the number of features.
    pub fn new(method: FeatureSelectionMethod, max_features: Option<usize>) -> Self {
        Self {
            selection_method: method,
            max_features,
        }
    }

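    /// Scores features with the configured method and returns the indices of
    /// the selected columns, best first.
    ///
    /// A usage sketch (marked `ignore`):
    ///
    /// ```ignore
    /// use scirs2_core::ndarray::{Array1, Array2};
    ///
    /// let selector = QuantumFeatureSelector::new(
    ///     FeatureSelectionMethod::QuantumMutualInformation,
    ///     Some(2),
    /// );
    /// let data = Array2::from_shape_fn((30, 4), |(i, j)| (i + j) as f64);
    /// let target = Array1::from_shape_fn(30, |i| i as f64);
    /// let selected = selector.select_features(&data, &target)?;
    /// assert_eq!(selected.len(), 2);
    /// ```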
    pub fn select_features(&self, data: &Array2<f64>, target: &Array1<f64>) -> Result<Vec<usize>> {
        match self.selection_method {
            FeatureSelectionMethod::QuantumMutualInformation => {
                self.quantum_mutual_information_selection(data, target)
            }
            FeatureSelectionMethod::QuantumEntanglement => {
                self.quantum_entanglement_selection(data, target)
            }
            FeatureSelectionMethod::VariationalImportance => {
                self.variational_importance_selection(data, target)
            }
            FeatureSelectionMethod::HybridSelection => self.hybrid_selection(data, target),
        }
    }

    fn quantum_mutual_information_selection(
        &self,
        data: &Array2<f64>,
        target: &Array1<f64>,
    ) -> Result<Vec<usize>> {
        let n_features = data.ncols();
        let mut feature_scores = Vec::new();

        for feature_idx in 0..n_features {
            let column = data.column(feature_idx);
            let mutual_info = self.compute_quantum_mutual_information(&column, target)?;
            feature_scores.push((feature_idx, mutual_info));
        }

        // Rank features by score, best first.
        feature_scores.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));

        let num_to_select = self.max_features.unwrap_or(n_features).min(n_features);
        Ok(feature_scores
            .into_iter()
            .take(num_to_select)
            .map(|(idx, _)| idx)
            .collect())
    }

    /// Histogram-based mutual information with a quantum-inspired weighting factor.
    fn compute_quantum_mutual_information(
        &self,
        feature: &scirs2_core::ndarray::ArrayView1<f64>,
        target: &Array1<f64>,
    ) -> Result<f64> {
        let mut mutual_info = 0.0;

        // Discretize both variables into a small number of bins.
        let n_bins = 5;
        let feature_bins = self.discretize_values(feature, n_bins)?;
        let target_bins = self.discretize_values(&target.view(), n_bins)?;

        // Joint and marginal bin counts.
        let n_samples = feature.len();
        let mut joint_counts = HashMap::new();
        let mut feature_counts = HashMap::new();
        let mut target_counts = HashMap::new();

        for i in 0..n_samples {
            let f_bin = feature_bins[i];
            let t_bin = target_bins[i];

            *joint_counts.entry((f_bin, t_bin)).or_insert(0) += 1;
            *feature_counts.entry(f_bin).or_insert(0) += 1;
            *target_counts.entry(t_bin).or_insert(0) += 1;
        }

        // Accumulate the mutual information contributions.
        for ((f_bin, t_bin), &joint_count) in &joint_counts {
            let joint_prob = joint_count as f64 / n_samples as f64;
            let feature_prob = *feature_counts.get(f_bin).unwrap_or(&0) as f64 / n_samples as f64;
            let target_prob = *target_counts.get(t_bin).unwrap_or(&0) as f64 / n_samples as f64;

            if joint_prob > 0.0 && feature_prob > 0.0 && target_prob > 0.0 {
                let classical_mi = joint_prob * (joint_prob / (feature_prob * target_prob)).ln();

                // Quantum-inspired weighting of the classical contribution.
                let quantum_factor = 1.0 + 0.1 * (joint_prob * PI).sin().abs();
                mutual_info += classical_mi * quantum_factor;
            }
        }

        Ok(mutual_info)
    }

    fn discretize_values(
        &self,
        values: &scirs2_core::ndarray::ArrayView1<f64>,
        n_bins: usize,
    ) -> Result<Vec<usize>> {
        let min_val = values.iter().fold(f64::INFINITY, |a, &b| a.min(b));
        let max_val = values.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
        let range = max_val - min_val;

        let mut bins = Vec::new();
        for &value in values.iter() {
            let bin_idx = if range > 1e-10 {
                ((value - min_val) / range * (n_bins - 1) as f64) as usize
            } else {
                0
            };
            bins.push(bin_idx.min(n_bins - 1));
        }

        Ok(bins)
    }

    /// Placeholder: entanglement-based selection currently returns all feature indices.
    fn quantum_entanglement_selection(
        &self,
        data: &Array2<f64>,
        _target: &Array1<f64>,
    ) -> Result<Vec<usize>> {
        Ok((0..data.ncols()).collect())
    }

    /// Placeholder: variational importance selection currently returns all feature indices.
    fn variational_importance_selection(
        &self,
        data: &Array2<f64>,
        _target: &Array1<f64>,
    ) -> Result<Vec<usize>> {
        Ok((0..data.ncols()).collect())
    }

    /// Hybrid selection currently delegates to quantum mutual information.
    fn hybrid_selection(&self, data: &Array2<f64>, target: &Array1<f64>) -> Result<Vec<usize>> {
        self.quantum_mutual_information_selection(data, target)
    }
}