use super::config::*;
use crate::error::{MLError, Result};
use crate::qnn::{QNNLayerType, QuantumNeuralNetwork};
use ndarray::{s, Array1, Array2};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::f64::consts::PI;

/// Quantum feature extractor combining classical and quantum transformations
#[derive(Debug, Clone)]
pub struct QuantumFeatureExtractor {
    /// Feature engineering configuration
    config: FeatureEngineeringConfig,

    /// Parameterized circuits used for feature extraction
    feature_circuits: Vec<Vec<f64>>,

    /// Quantum neural network applying the learned feature transformation
    transform_network: QuantumNeuralNetwork,

    /// Optional quantum Fourier feature generator
    fourier_generator: Option<QuantumFourierFeatures>,

    /// Optional quantum wavelet transformer
    wavelet_transformer: Option<QuantumWaveletTransform>,

    /// Statistics computed over the extracted features
    feature_stats: FeatureStatistics,
}

/// Quantum Fourier feature generator
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuantumFourierFeatures {
    /// Number of Fourier components
    num_components: usize,

    /// Frequency ranges (min, max) to cover
    frequency_ranges: Vec<(f64, f64)>,

    /// Parameters of the quantum Fourier transform circuit
    qft_circuit: Vec<f64>,

    /// Frequencies learned from the data
    learned_frequencies: Array1<f64>,

    /// Phase relationships between components
    phase_relationships: Array2<f64>,
}

/// Quantum wavelet transform
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QuantumWaveletTransform {
    /// Wavelet family used for the decomposition
    wavelet_type: WaveletType,

    /// Number of decomposition levels
    num_levels: usize,

    /// Circuit parameters, one set per level
    wavelet_circuits: Vec<Vec<f64>>,

    /// Threshold below which coefficients are zeroed
    threshold: f64,

    /// Stored wavelet coefficients
    coefficients: Vec<Array2<f64>>,
}

/// Summary statistics of the extracted features
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FeatureStatistics {
    /// Per-feature means
    pub means: Array1<f64>,

    /// Per-feature standard deviations
    pub stds: Array1<f64>,

    /// Per-feature ranges (max - min)
    pub ranges: Array1<f64>,

    /// Pairwise feature correlations
    pub correlations: Array2<f64>,

    /// Entropy-based entanglement measures per feature
    pub entanglement_measures: Array1<f64>,
}

/// Generator for lagged copies of input features
#[derive(Debug, Clone)]
pub struct LagFeatureGenerator {
    lag_periods: Vec<usize>,
    feature_names: Vec<String>,
}

/// Calculator for rolling-window statistics
#[derive(Debug, Clone)]
pub struct RollingStatsCalculator {
    window_sizes: Vec<usize>,
    stats_types: Vec<StatType>,
}

/// Statistics that can be computed over a rolling window
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum StatType {
    Mean,
    Std,
    Min,
    Max,
    Median,
    Quantile(f64),
    Skewness,
    Kurtosis,
}

/// Generator for interaction features between input columns
#[derive(Debug, Clone)]
pub struct InteractionFeatureGenerator {
    max_interaction_order: usize,
    interaction_types: Vec<InteractionType>,
}

/// Ways of combining two features into an interaction feature
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum InteractionType {
    Multiplication,
    Division,
    Addition,
    Subtraction,
    QuantumEntanglement,
}

impl QuantumFeatureExtractor {
    pub fn new(config: FeatureEngineeringConfig, num_qubits: usize) -> Result<Self> {
        let mut feature_circuits = Vec::new();

        for circuit_idx in 0..5 {
            let mut circuit_params = Vec::new();

            // Parameters for the single-qubit rotation layer
            for _ in 0..num_qubits {
                circuit_params.push(1.0);
                circuit_params.push(PI * circuit_idx as f64 / 5.0);
            }

            // Parameters for the entangling layer
            for qubit_idx in 0..num_qubits.saturating_sub(1) {
                circuit_params.push(2.0);
                circuit_params.push(PI / 4.0 * qubit_idx as f64);
            }

            feature_circuits.push(circuit_params);
        }

        let layers = vec![
            QNNLayerType::EncodingLayer { num_features: 100 },
            QNNLayerType::VariationalLayer { num_params: 50 },
            QNNLayerType::MeasurementLayer {
                measurement_basis: "computational".to_string(),
            },
        ];

        let transform_network = QuantumNeuralNetwork::new(layers, num_qubits, 100, 50)?;

        let fourier_generator = if config.quantum_fourier_features {
            Some(QuantumFourierFeatures::new(
                20,
                vec![(0.1, 10.0), (10.0, 100.0)],
                num_qubits,
            )?)
        } else {
            None
        };

        let wavelet_transformer = if config.wavelet_decomposition {
            Some(QuantumWaveletTransform::new(
                WaveletType::Daubechies(4),
                3,
                num_qubits,
            )?)
        } else {
            None
        };

        let feature_stats = FeatureStatistics::new();

        Ok(Self {
            config,
            feature_circuits,
            transform_network,
            fourier_generator,
            wavelet_transformer,
            feature_stats,
        })
    }

    /// Extract engineered features from the raw input data
    pub fn extract_features(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        let mut features = data.clone();

        // Lag features
        features = self.add_lag_features(&features)?;

        // Rolling-window statistics
        features = self.add_rolling_features(&features)?;

        // Quantum Fourier features
        if let Some(ref fourier_gen) = self.fourier_generator {
            features = fourier_gen.transform(&features)?;
        }

        // Quantum wavelet decomposition
        if let Some(ref wavelet_trans) = self.wavelet_transformer {
            features = wavelet_trans.decompose(&features)?;
        }

        // Pairwise interaction features
        if self.config.interaction_features {
            features = self.add_interaction_features(&features)?;
        }

        // Quantum transformation through the QNN
        features = self.apply_quantum_transformation(&features)?;

        // Normalization
        features = self.normalize_features(&features)?;

        Ok(features)
    }

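    // Lag features: for each configured lag and each original feature, append a
    // column whose row `t` holds that feature's value at row `t - lag`; rows with
    // `t < lag` are left at zero.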
    fn add_lag_features(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        if self.config.lag_features.is_empty() {
            return Ok(data.clone());
        }

        let (n_samples, n_features) = data.dim();
        let total_lag_features = self.config.lag_features.len() * n_features;
        let mut enhanced_data = Array2::zeros((n_samples, n_features + total_lag_features));

        // Copy the original features
        enhanced_data.slice_mut(s![.., 0..n_features]).assign(data);

        // Append lagged copies of each feature
        let mut feature_offset = n_features;
        for &lag in &self.config.lag_features {
            for feature_idx in 0..n_features {
                for sample_idx in lag..n_samples {
                    enhanced_data[[sample_idx, feature_offset]] =
                        data[[sample_idx - lag, feature_idx]];
                }
                feature_offset += 1;
            }
        }

        Ok(enhanced_data)
    }

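    // Rolling statistics: for each configured window size and each original feature,
    // append three columns (trailing-window mean, sample standard deviation, and
    // maximum); rows earlier than one full window are left at zero.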
    fn add_rolling_features(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        if self.config.rolling_windows.is_empty() {
            return Ok(data.clone());
        }

        let (n_samples, n_features) = data.dim();
        let stats_per_window = 3; // mean, std, max
        let total_rolling_features =
            self.config.rolling_windows.len() * n_features * stats_per_window;
        let mut enhanced_data = Array2::zeros((n_samples, n_features + total_rolling_features));

        // Copy the original features
        enhanced_data.slice_mut(s![.., 0..n_features]).assign(data);

        let mut feature_offset = n_features;
        for &window_size in &self.config.rolling_windows {
            for feature_idx in 0..n_features {
                for sample_idx in window_size..n_samples {
                    let window_start = sample_idx.saturating_sub(window_size);
                    let window_data = data.slice(s![window_start..sample_idx, feature_idx]);

                    // Rolling mean
                    enhanced_data[[sample_idx, feature_offset]] = window_data.mean().unwrap_or(0.0);

                    // Rolling standard deviation
                    enhanced_data[[sample_idx, feature_offset + 1]] = window_data.std(1.0);

                    // Rolling maximum
                    enhanced_data[[sample_idx, feature_offset + 2]] =
                        window_data.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
                }
                feature_offset += stats_per_window;
            }
        }

        Ok(enhanced_data)
    }

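    // Interaction features: append the pairwise product of every distinct feature
    // pair, adding n * (n - 1) / 2 extra columns.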
    fn add_interaction_features(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        let (n_samples, n_features) = data.dim();

        if n_features < 2 {
            return Ok(data.clone());
        }

        let n_interactions = n_features * (n_features - 1) / 2;
        let mut enhanced_data = Array2::zeros((n_samples, n_features + n_interactions));

        // Copy the original features
        enhanced_data.slice_mut(s![.., 0..n_features]).assign(data);

        // Append pairwise products
        let mut interaction_idx = n_features;
        for i in 0..n_features {
            for j in (i + 1)..n_features {
                for sample_idx in 0..n_samples {
                    enhanced_data[[sample_idx, interaction_idx]] =
                        data[[sample_idx, i]] * data[[sample_idx, j]];
                }
                interaction_idx += 1;
            }
        }

        Ok(enhanced_data)
    }

    fn apply_quantum_transformation(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        if !self.config.quantum_features {
            return Ok(data.clone());
        }

        // Pass each sample through the quantum transform network
        let mut quantum_features = Array2::zeros((data.nrows(), self.transform_network.output_dim));

        for (i, row) in data.rows().into_iter().enumerate() {
            let row_vec = row.to_owned();
            let transformed = self.transform_network.forward(&row_vec)?;
            quantum_features.row_mut(i).assign(&transformed);
        }

        // Concatenate the original and quantum features
        let (n_samples, n_features) = data.dim();
        let mut combined_features =
            Array2::zeros((n_samples, n_features + quantum_features.ncols()));

        combined_features
            .slice_mut(s![.., 0..n_features])
            .assign(data);
        combined_features
            .slice_mut(s![.., n_features..])
            .assign(&quantum_features);

        Ok(combined_features)
    }

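    // Column-wise z-score normalization: x' = (x - mean) / std, with the standard
    // deviation floored at 1e-8 to avoid division by zero.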
    fn normalize_features(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        let mut normalized = data.clone();

        for j in 0..data.ncols() {
            let column = data.column(j);
            let mean = column.mean().unwrap_or(0.0);
            // Floor the standard deviation to avoid division by zero
            let std = column.std(1.0).max(1e-8);

            for i in 0..data.nrows() {
                normalized[[i, j]] = (data[[i, j]] - mean) / std;
            }
        }

        Ok(normalized)
    }

    pub fn fit_statistics(&mut self, data: &Array2<f64>) -> Result<()> {
        self.feature_stats.compute_statistics(data)?;
        Ok(())
    }

    pub fn get_feature_importance(&self) -> Result<Array1<f64>> {
        Ok(self.feature_stats.entanglement_measures.clone())
    }
}

impl QuantumFourierFeatures {
    pub fn new(
        num_components: usize,
        frequency_ranges: Vec<(f64, f64)>,
        num_qubits: usize,
    ) -> Result<Self> {
        let mut qft_circuit = Vec::new();

        // One parameter per qubit (Hadamard stage of the QFT)
        for _ in 0..num_qubits {
            qft_circuit.push(1.0);
        }

        // Controlled-phase angles pi / 2^(j - i) between qubit pairs
        for i in 0..num_qubits {
            for j in (i + 1)..num_qubits {
                let phase = PI / 2_f64.powi((j - i) as i32);
                qft_circuit.push(phase);
            }
        }

        let learned_frequencies = Array1::from_shape_fn(num_components, |i| 0.1 + i as f64 * 0.1);

        let phase_relationships = Array2::zeros((num_components, num_components));

        Ok(Self {
            num_components,
            frequency_ranges,
            qft_circuit,
            learned_frequencies,
            phase_relationships,
        })
    }

    pub fn transform(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        let (n_samples, n_features) = data.dim();
        let fourier_features_count = self.num_components * 2; // sine and cosine per component
        let mut fourier_features = Array2::zeros((n_samples, n_features + fourier_features_count));

        fourier_features
            .slice_mut(s![.., 0..n_features])
            .assign(data);

        for i in 0..n_samples {
            for (j, &freq) in self.learned_frequencies.iter().enumerate() {
                let phase = i as f64 * freq * 2.0 * PI / n_samples as f64;

                let quantum_phase = self.apply_quantum_phase_enhancement(phase, j)?;

                fourier_features[[i, n_features + 2 * j]] = quantum_phase.sin();
                fourier_features[[i, n_features + 2 * j + 1]] = quantum_phase.cos();
            }
        }

        Ok(fourier_features)
    }

    fn apply_quantum_phase_enhancement(&self, phase: f64, component_idx: usize) -> Result<f64> {
        let mut enhanced_phase = phase;

        if component_idx < self.qft_circuit.len() {
            let circuit_param = self.qft_circuit[component_idx % self.qft_circuit.len()];
            enhanced_phase = phase * circuit_param + 0.1 * (phase * circuit_param).sin();
        }

        Ok(enhanced_phase)
    }

    pub fn learn_frequencies(&mut self, data: &Array2<f64>) -> Result<()> {
        for i in 0..self.num_components.min(data.ncols()) {
            let column = data.column(i % data.ncols());
            let estimated_freq = self.estimate_dominant_frequency(&column)?;
            self.learned_frequencies[i] = estimated_freq;
        }

        Ok(())
    }

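    // Simplified spectral estimate: project the signal onto cosines at frequencies
    // k / n for k = 1..n/2 and return the frequency with the largest absolute
    // projection (a coarse stand-in for a full DFT).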
    fn estimate_dominant_frequency(&self, signal: &ndarray::ArrayView1<f64>) -> Result<f64> {
        let n = signal.len();
        let mut max_power = 0.0;
        let mut dominant_freq = 0.1;

        for k in 1..n / 2 {
            let freq = k as f64 / n as f64;
            let mut power = 0.0;

            for (i, &value) in signal.iter().enumerate() {
                power += value * (2.0 * PI * freq * i as f64).cos();
            }

            if power.abs() > max_power {
                max_power = power.abs();
                dominant_freq = freq;
            }
        }

        Ok(dominant_freq)
    }
}

impl QuantumWaveletTransform {
    pub fn new(wavelet_type: WaveletType, num_levels: usize, num_qubits: usize) -> Result<Self> {
        let mut wavelet_circuits = Vec::new();

        for level in 0..num_levels {
            let mut circuit_params = Vec::new();

            // Rotation parameters for this level
            for _ in 0..num_qubits / 2 {
                circuit_params.push(1.0);
                circuit_params.push(PI / 4.0 * (level + 1) as f64);
            }

            // Level-dependent scaling parameters
            for _ in 0..num_qubits / 2 {
                circuit_params.push(2.0_f64.powi(-(level as i32)));
            }

            wavelet_circuits.push(circuit_params);
        }

        Ok(Self {
            wavelet_type,
            num_levels,
            wavelet_circuits,
            threshold: 0.1,
            coefficients: Vec::new(),
        })
    }

    pub fn decompose(&self, data: &Array2<f64>) -> Result<Array2<f64>> {
        let mut decomposed = data.clone();

        for level in 0..self.num_levels {
            decomposed = self.apply_wavelet_level(&decomposed, level)?;
        }

        // Zero out small coefficients
        self.apply_threshold(&mut decomposed);

        Ok(decomposed)
    }

    fn apply_wavelet_level(&self, data: &Array2<f64>, level: usize) -> Result<Array2<f64>> {
        if level >= self.wavelet_circuits.len() {
            return Ok(data.clone());
        }

        let circuit = &self.wavelet_circuits[level];
        let mut result = data.clone();

        for i in 0..data.nrows() {
            for j in 0..data.ncols() {
                let mut value = data[[i, j]];

                for &param in circuit.iter() {
                    let scale = 2.0_f64.powi(-(level as i32));
                    let wavelet_value = self.wavelet_function(value * scale, param)?;
                    // Blend the original value with the wavelet response
                    value = value * 0.7 + wavelet_value * 0.3;
                }

                result[[i, j]] = value;
            }
        }

        Ok(result)
    }

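    // Mother-wavelet evaluation: the classical Haar wavelet is 1 on [0, 0.5),
    // -1 on [0.5, 1) and 0 elsewhere; the other variants use damped sinusoids.
    // The Haar and Daubechies branches add a small sinusoidal perturbation driven
    // by `quantum_param`.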
    fn wavelet_function(&self, x: f64, quantum_param: f64) -> Result<f64> {
        match self.wavelet_type {
            WaveletType::Haar => {
                let classical_haar = if x >= 0.0 && x < 0.5 {
                    1.0
                } else if x >= 0.5 && x < 1.0 {
                    -1.0
                } else {
                    0.0
                };

                let quantum_enhancement = (quantum_param * x).sin() * 0.1;
                Ok(classical_haar + quantum_enhancement)
            }
            WaveletType::Daubechies(_) => {
                let classical = (PI * x).sin() * (-x * x / 2.0).exp();
                let quantum_enhancement = (quantum_param * x * PI).cos() * 0.05;
                Ok(classical + quantum_enhancement)
            }
            WaveletType::Quantum => {
                let quantum_phase = quantum_param * x * PI;
                Ok(quantum_phase.sin() * (-x * x).exp())
            }
            _ => Ok((PI * x).sin() * (-x * x / 2.0).exp()),
        }
    }

    fn apply_threshold(&self, data: &mut Array2<f64>) {
        for value in data.iter_mut() {
            if value.abs() < self.threshold {
                *value = 0.0;
            }
        }
    }
}

impl FeatureStatistics {
    pub fn new() -> Self {
        Self {
            means: Array1::zeros(0),
            stds: Array1::zeros(0),
            ranges: Array1::zeros(0),
            correlations: Array2::zeros((0, 0)),
            entanglement_measures: Array1::zeros(0),
        }
    }

    pub fn compute_statistics(&mut self, data: &Array2<f64>) -> Result<()> {
        let n_features = data.ncols();

        self.means = Array1::zeros(n_features);
        self.stds = Array1::zeros(n_features);
        self.ranges = Array1::zeros(n_features);

        // Per-feature mean, standard deviation, and range
        for j in 0..n_features {
            let column = data.column(j);
            self.means[j] = column.mean().unwrap_or(0.0);
            self.stds[j] = column.std(1.0);

            let min_val = column.iter().fold(f64::INFINITY, |a, &b| a.min(b));
            let max_val = column.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
            self.ranges[j] = max_val - min_val;
        }

        // Pairwise correlation matrix
        self.correlations = Array2::zeros((n_features, n_features));
        for i in 0..n_features {
            for j in 0..n_features {
                let corr = self.compute_correlation(data, i, j)?;
                self.correlations[[i, j]] = corr;
            }
        }

        // Entropy-based entanglement measure per feature
        self.entanglement_measures = Array1::zeros(n_features);
        for j in 0..n_features {
            let entanglement = self.compute_quantum_entanglement(data, j)?;
            self.entanglement_measures[j] = entanglement;
        }

        Ok(())
    }

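    // Pearson correlation: r = sum((x_i - mean_x)(y_i - mean_y))
    //     / sqrt(sum((x_i - mean_x)^2) * sum((y_i - mean_y)^2)),
    // returning 0 when either column has (near-)zero variance.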
    fn compute_correlation(&self, data: &Array2<f64>, i: usize, j: usize) -> Result<f64> {
        let col_i = data.column(i);
        let col_j = data.column(j);

        let mean_i = col_i.mean().unwrap_or(0.0);
        let mean_j = col_j.mean().unwrap_or(0.0);

        let mut numerator = 0.0;
        let mut sum_sq_i = 0.0;
        let mut sum_sq_j = 0.0;

        for (val_i, val_j) in col_i.iter().zip(col_j.iter()) {
            let dev_i = val_i - mean_i;
            let dev_j = val_j - mean_j;

            numerator += dev_i * dev_j;
            sum_sq_i += dev_i * dev_i;
            sum_sq_j += dev_j * dev_j;
        }

        let denominator = (sum_sq_i * sum_sq_j).sqrt();
        if denominator < 1e-10 {
            Ok(0.0)
        } else {
            Ok(numerator / denominator)
        }
    }

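    // "Entanglement" proxy: a 10-bin histogram Shannon entropy of the feature,
    // H = -sum(p_b * ln(p_b)), divided by the number of bins.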
    fn compute_quantum_entanglement(&self, data: &Array2<f64>, feature_idx: usize) -> Result<f64> {
        let column = data.column(feature_idx);

        let mut entropy = 0.0;
        let n_bins = 10;
        let min_val = column.iter().fold(f64::INFINITY, |a, &b| a.min(b));
        let max_val = column.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
        let range = max_val - min_val;

        if range > 1e-10 {
            let mut bin_counts = vec![0; n_bins];

            // Histogram the feature values
            for &value in column.iter() {
                let bin_idx = ((value - min_val) / range * (n_bins - 1) as f64) as usize;
                let bin_idx = bin_idx.min(n_bins - 1);
                bin_counts[bin_idx] += 1;
            }

            // Shannon entropy of the histogram
            let n_total = column.len() as f64;
            for &count in &bin_counts {
                if count > 0 {
                    let prob = count as f64 / n_total;
                    entropy -= prob * prob.ln();
                }
            }
        }

        // Normalize by the number of bins
        Ok(entropy / n_bins as f64)
    }
}

/// Quantum feature selector
pub struct QuantumFeatureSelector {
    /// Selection method to apply
    selection_method: FeatureSelectionMethod,
    /// Optional cap on the number of selected features
    max_features: Option<usize>,
}

/// Available feature selection methods
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum FeatureSelectionMethod {
    QuantumMutualInformation,
    QuantumEntanglement,
    VariationalImportance,
    HybridSelection,
}

impl QuantumFeatureSelector {
    pub fn new(method: FeatureSelectionMethod, max_features: Option<usize>) -> Self {
        Self {
            selection_method: method,
            max_features,
        }
    }

    pub fn select_features(&self, data: &Array2<f64>, target: &Array1<f64>) -> Result<Vec<usize>> {
        match self.selection_method {
            FeatureSelectionMethod::QuantumMutualInformation => {
                self.quantum_mutual_information_selection(data, target)
            }
            FeatureSelectionMethod::QuantumEntanglement => {
                self.quantum_entanglement_selection(data, target)
            }
            FeatureSelectionMethod::VariationalImportance => {
                self.variational_importance_selection(data, target)
            }
            FeatureSelectionMethod::HybridSelection => self.hybrid_selection(data, target),
        }
    }

    fn quantum_mutual_information_selection(
        &self,
        data: &Array2<f64>,
        target: &Array1<f64>,
    ) -> Result<Vec<usize>> {
        let n_features = data.ncols();
        let mut feature_scores = Vec::new();

        for feature_idx in 0..n_features {
            let column = data.column(feature_idx);
            let mutual_info = self.compute_quantum_mutual_information(&column, target)?;
            feature_scores.push((feature_idx, mutual_info));
        }

        // Sort by score in descending order
        feature_scores.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));

        let num_to_select = self.max_features.unwrap_or(n_features).min(n_features);
        Ok(feature_scores
            .into_iter()
            .take(num_to_select)
            .map(|(idx, _)| idx)
            .collect())
    }

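    // Histogram-based mutual information:
    // I(X; Y) = sum_{x,y} p(x,y) * ln(p(x,y) / (p(x) p(y))),
    // with each term scaled by a heuristic "quantum factor" in [1.0, 1.1].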
    fn compute_quantum_mutual_information(
        &self,
        feature: &ndarray::ArrayView1<f64>,
        target: &Array1<f64>,
    ) -> Result<f64> {
        let mut mutual_info = 0.0;

        let n_bins = 5;
        let feature_bins = self.discretize_values(feature, n_bins)?;
        let target_bins = self.discretize_values(&target.view(), n_bins)?;

        let n_samples = feature.len();
        let mut joint_counts = HashMap::new();
        let mut feature_counts = HashMap::new();
        let mut target_counts = HashMap::new();

        for i in 0..n_samples {
            let f_bin = feature_bins[i];
            let t_bin = target_bins[i];

            *joint_counts.entry((f_bin, t_bin)).or_insert(0) += 1;
            *feature_counts.entry(f_bin).or_insert(0) += 1;
            *target_counts.entry(t_bin).or_insert(0) += 1;
        }

        for ((f_bin, t_bin), &joint_count) in &joint_counts {
            let joint_prob = joint_count as f64 / n_samples as f64;
            let feature_prob = *feature_counts.get(f_bin).unwrap_or(&0) as f64 / n_samples as f64;
            let target_prob = *target_counts.get(t_bin).unwrap_or(&0) as f64 / n_samples as f64;

            if joint_prob > 0.0 && feature_prob > 0.0 && target_prob > 0.0 {
                let classical_mi = joint_prob * (joint_prob / (feature_prob * target_prob)).ln();

                let quantum_factor = 1.0 + 0.1 * (joint_prob * PI).sin().abs();
                mutual_info += classical_mi * quantum_factor;
            }
        }

        Ok(mutual_info)
    }

    fn discretize_values(
        &self,
        values: &ndarray::ArrayView1<f64>,
        n_bins: usize,
    ) -> Result<Vec<usize>> {
        let min_val = values.iter().fold(f64::INFINITY, |a, &b| a.min(b));
        let max_val = values.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
        let range = max_val - min_val;

        let mut bins = Vec::new();
        for &value in values.iter() {
            let bin_idx = if range > 1e-10 {
                ((value - min_val) / range * (n_bins - 1) as f64) as usize
            } else {
                0
            };
            bins.push(bin_idx.min(n_bins - 1));
        }

        Ok(bins)
    }

    fn quantum_entanglement_selection(
        &self,
        data: &Array2<f64>,
        _target: &Array1<f64>,
    ) -> Result<Vec<usize>> {
        // Placeholder: keep all features
        Ok((0..data.ncols()).collect())
    }

    fn variational_importance_selection(
        &self,
        data: &Array2<f64>,
        _target: &Array1<f64>,
    ) -> Result<Vec<usize>> {
        // Placeholder: keep all features
        Ok((0..data.ncols()).collect())
    }

    fn hybrid_selection(&self, data: &Array2<f64>, target: &Array1<f64>) -> Result<Vec<usize>> {
        self.quantum_mutual_information_selection(data, target)
    }
}
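
// A minimal usage sketch (assuming `FeatureEngineeringConfig` provides a `Default`
// implementation; adapt to the actual constructor exposed by `super::config`):
//
// let config = FeatureEngineeringConfig::default();
// let extractor = QuantumFeatureExtractor::new(config, 4)?;
// let features = extractor.extract_features(&raw_data)?;
//
// let selector = QuantumFeatureSelector::new(
//     FeatureSelectionMethod::QuantumMutualInformation,
//     Some(10),
// );
// let selected_indices = selector.select_features(&features, &targets)?;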