sklears_kernel_approximation/
finance_kernels.rs

1//! Finance and Economics Kernel Methods
2//!
3//! This module implements kernel methods for financial and economic applications,
4//! including financial time series analysis, volatility modeling, econometric methods,
5//! portfolio optimization, and risk analysis.
6//!
7//! # References
8//! - Engle (1982): "Autoregressive Conditional Heteroskedasticity"
9//! - Bollerslev (1986): "Generalized Autoregressive Conditional Heteroskedasticity"
10//! - Tsay (2010): "Analysis of Financial Time Series"
11//! - McNeil et al. (2015): "Quantitative Risk Management"
12
13use scirs2_core::ndarray::{Array1, Array2};
14use scirs2_core::random::essentials::Normal;
15use scirs2_core::random::{thread_rng, Distribution};
16use sklears_core::{
17    error::{Result, SklearsError},
18    prelude::{Fit, Transform},
19    traits::{Trained, Untrained},
20    types::Float,
21};
22use std::marker::PhantomData;
23
24// ============================================================================
25// Financial Kernel
26// ============================================================================
27
/// Kernel method for financial time series analysis
///
/// This kernel extracts features from financial time series data including
/// returns, volatility, technical indicators, and market microstructure features.
///
/// Uses the typestate pattern: a `FinancialKernel<Untrained>` is configured,
/// then consumed by `fit`, which returns a `FinancialKernel<Trained>` capable
/// of `transform`.
///
/// # References
/// - Murphy (1999): "Technical Analysis of the Financial Markets"
pub struct FinancialKernel<State = Untrained> {
    /// Number of random features produced per sample by `transform`
    n_components: usize,
    /// Window size (in columns) for rolling volatility / moving-average features
    window_size: usize,
    /// Whether to include volatility features
    include_volatility: bool,
    /// Random projection matrix (feature_dim x n_components); `Some` only once trained
    projection: Option<Array2<Float>>,
    /// Relative weights for the three feature groups
    /// (index 0: returns, 1: volatility, 2: technical indicators); `Some` only once trained
    feature_weights: Option<Array1<Float>>,
    /// Zero-sized marker carrying the Untrained/Trained typestate
    _state: PhantomData<State>,
}
49
50impl FinancialKernel<Untrained> {
51    /// Create a new financial kernel
52    pub fn new(n_components: usize, window_size: usize) -> Self {
53        Self {
54            n_components,
55            window_size,
56            include_volatility: true,
57            projection: None,
58            feature_weights: None,
59            _state: PhantomData,
60        }
61    }
62
63    /// Set whether to include volatility features
64    pub fn include_volatility(mut self, include_volatility: bool) -> Self {
65        self.include_volatility = include_volatility;
66        self
67    }
68}
69
70impl Default for FinancialKernel<Untrained> {
71    fn default() -> Self {
72        Self::new(100, 20)
73    }
74}
75
76impl Fit<Array2<Float>, ()> for FinancialKernel<Untrained> {
77    type Fitted = FinancialKernel<Trained>;
78
79    fn fit(self, x: &Array2<Float>, _y: &()) -> Result<Self::Fitted> {
80        let n_samples = x.nrows();
81        if n_samples == 0 {
82            return Err(SklearsError::InvalidInput(
83                "Input array cannot be empty".to_string(),
84            ));
85        }
86
87        // Feature dimension: returns + volatility + technical indicators
88        let base_features = 10; // Returns features
89        let volatility_features = if self.include_volatility { 5 } else { 0 };
90        let technical_features = 8; // Moving averages, RSI, etc.
91        let feature_dim = base_features + volatility_features + technical_features;
92
93        let mut rng = thread_rng();
94        let normal = Normal::new(0.0, 1.0 / (feature_dim as Float).sqrt()).unwrap();
95
96        // Generate random projection
97        let mut projection = Array2::zeros((feature_dim, self.n_components));
98        for i in 0..feature_dim {
99            for j in 0..self.n_components {
100                projection[[i, j]] = normal.sample(&mut rng);
101            }
102        }
103
104        // Initialize feature weights (returns > volatility > indicators)
105        let mut feature_weights = Array1::zeros(3);
106        feature_weights[0] = 1.0; // Returns
107        feature_weights[1] = 0.7; // Volatility
108        feature_weights[2] = 0.5; // Technical indicators
109
110        Ok(FinancialKernel {
111            n_components: self.n_components,
112            window_size: self.window_size,
113            include_volatility: self.include_volatility,
114            projection: Some(projection),
115            feature_weights: Some(feature_weights),
116            _state: PhantomData,
117        })
118    }
119}
120
impl Transform<Array2<Float>, Array2<Float>> for FinancialKernel<Trained> {
    /// Map each row (treated as a price/level series) to `n_components`
    /// random features.
    ///
    /// Per row, three engineered groups are written into a `feature_dim`
    /// vector (unfilled slots stay zero), which is then multiplied by the
    /// random projection drawn in `fit`:
    /// 1. up to 10 log-return features,
    /// 2. up to 5 rolling-deviation "volatility" features (if enabled),
    /// 3. up to 8 moving-average "technical" features.
    fn transform(&self, x: &Array2<Float>) -> Result<Array2<Float>> {
        let n_samples = x.nrows();
        let n_features = x.ncols();

        if n_samples == 0 {
            return Err(SklearsError::InvalidInput(
                "Input array cannot be empty".to_string(),
            ));
        }

        // Set by `fit`, so present for any Trained kernel.
        let projection = self.projection.as_ref().unwrap();
        let feature_dim = projection.nrows();
        let feature_weights = self.feature_weights.as_ref().unwrap();

        // Extract financial features
        let mut financial_features = Array2::zeros((n_samples, feature_dim));

        for i in 0..n_samples {
            let mut feat_idx = 0;

            // Returns features (log returns approximation).
            // 1e-8 keeps the ratio finite for zero prices; negative ratios
            // yield NaN from ln() — presumably inputs are positive levels.
            for j in 0..10.min(n_features) {
                if j + 1 < n_features {
                    let ret = (x[[i, j + 1]] / (x[[i, j]] + 1e-8)).ln();
                    financial_features[[i, feat_idx]] = ret * feature_weights[0];
                    feat_idx += 1;
                }
            }

            // Volatility features: root mean squared deviation from the
            // window's first element (a rolling-std approximation).
            if self.include_volatility {
                for j in 0..5.min(n_features / 2) {
                    let mut sum_sq = 0.0;
                    // j < n_features/2 here, so n_features - j cannot underflow.
                    let window = self.window_size.min(n_features - j);
                    for k in 0..window {
                        if j + k < n_features {
                            sum_sq += (x[[i, j + k]] - x[[i, j]]).powi(2);
                        }
                    }
                    let volatility = (sum_sq / window as Float).sqrt();
                    if feat_idx < feature_dim {
                        financial_features[[i, feat_idx]] = volatility * feature_weights[1];
                        feat_idx += 1;
                    }
                }
            }

            // Technical indicators (simple moving average approximation)
            for j in 0..8.min(n_features / 3) {
                let window = self.window_size.min(n_features - j);
                let mut sma = 0.0;
                for k in 0..window {
                    if j + k < n_features {
                        sma += x[[i, j + k]];
                    }
                }
                sma /= window as Float;
                if feat_idx < feature_dim {
                    financial_features[[i, feat_idx]] = sma * feature_weights[2];
                    feat_idx += 1;
                }
            }
        }

        // Apply random projection: (n_samples x feature_dim) . (feature_dim x n_components)
        let features = financial_features.dot(projection);

        Ok(features)
    }
}
192
193// ============================================================================
194// Volatility Kernel
195// ============================================================================
196
/// Volatility modeling method
///
/// Selects how `VolatilityKernel::transform` turns a row of observations
/// into volatility estimates.
#[derive(Debug, Clone, Copy)]
pub enum VolatilityModel {
    /// Simple historical volatility (root mean of squared values over growing windows)
    Historical,
    /// EWMA volatility (RiskMetrics-style exponentially weighted variance)
    EWMA,
    /// GARCH-like volatility (simplified GARCH(1,1) recursion with fixed parameters)
    GARCH,
    /// Realized volatility (root mean of all squared values in the row)
    Realized,
}
209
/// Kernel method for volatility modeling and forecasting
///
/// This kernel focuses on volatility features using various volatility models.
/// Follows the same Untrained/Trained typestate pattern as the other kernels
/// in this module.
///
/// # References
/// - Bollerslev (1986): "Generalized Autoregressive Conditional Heteroskedasticity"
pub struct VolatilityKernel<State = Untrained> {
    /// Number of random features produced per sample
    n_components: usize,
    /// Volatility model to use
    model: VolatilityModel,
    /// Decay factor for EWMA (typically 0.94, the RiskMetrics daily value)
    decay_factor: Float,
    /// Random projection matrix (feature_dim x n_components); `Some` only once trained
    projection: Option<Array2<Float>>,
    /// Zero-sized marker carrying the Untrained/Trained typestate
    _state: PhantomData<State>,
}
228
229impl VolatilityKernel<Untrained> {
230    /// Create a new volatility kernel
231    pub fn new(n_components: usize, model: VolatilityModel) -> Self {
232        Self {
233            n_components,
234            model,
235            decay_factor: 0.94,
236            projection: None,
237            _state: PhantomData,
238        }
239    }
240
241    /// Set the decay factor for EWMA
242    pub fn decay_factor(mut self, decay_factor: Float) -> Self {
243        self.decay_factor = decay_factor;
244        self
245    }
246}
247
248impl Default for VolatilityKernel<Untrained> {
249    fn default() -> Self {
250        Self::new(100, VolatilityModel::EWMA)
251    }
252}
253
254impl Fit<Array2<Float>, ()> for VolatilityKernel<Untrained> {
255    type Fitted = VolatilityKernel<Trained>;
256
257    fn fit(self, x: &Array2<Float>, _y: &()) -> Result<Self::Fitted> {
258        let n_samples = x.nrows();
259        if n_samples == 0 {
260            return Err(SklearsError::InvalidInput(
261                "Input array cannot be empty".to_string(),
262            ));
263        }
264
265        // Volatility features dimension
266        let feature_dim = 20;
267
268        let mut rng = thread_rng();
269        let normal = Normal::new(0.0, 1.0 / (feature_dim as Float).sqrt()).unwrap();
270
271        // Generate random projection
272        let mut projection = Array2::zeros((feature_dim, self.n_components));
273        for i in 0..feature_dim {
274            for j in 0..self.n_components {
275                projection[[i, j]] = normal.sample(&mut rng);
276            }
277        }
278
279        Ok(VolatilityKernel {
280            n_components: self.n_components,
281            model: self.model,
282            decay_factor: self.decay_factor,
283            projection: Some(projection),
284            _state: PhantomData,
285        })
286    }
287}
288
289impl Transform<Array2<Float>, Array2<Float>> for VolatilityKernel<Trained> {
290    fn transform(&self, x: &Array2<Float>) -> Result<Array2<Float>> {
291        let n_samples = x.nrows();
292        let n_features = x.ncols();
293
294        if n_samples == 0 {
295            return Err(SklearsError::InvalidInput(
296                "Input array cannot be empty".to_string(),
297            ));
298        }
299
300        let projection = self.projection.as_ref().unwrap();
301        let feature_dim = projection.nrows();
302
303        // Extract volatility features
304        let mut volatility_features = Array2::zeros((n_samples, feature_dim));
305
306        for i in 0..n_samples {
307            match self.model {
308                VolatilityModel::Historical => {
309                    // Simple historical volatility at different horizons
310                    for j in 0..feature_dim.min(n_features) {
311                        let window = 5 + j;
312                        let mut sum_sq = 0.0;
313                        let count = window.min(n_features);
314                        for k in 0..count {
315                            sum_sq += x[[i, k]].powi(2);
316                        }
317                        volatility_features[[i, j]] = (sum_sq / count as Float).sqrt();
318                    }
319                }
320                VolatilityModel::EWMA => {
321                    // Exponentially weighted moving average volatility
322                    for j in 0..feature_dim.min(n_features) {
323                        let mut ewma_var = x[[i, 0]].powi(2);
324                        for k in 1..n_features {
325                            ewma_var = self.decay_factor * ewma_var
326                                + (1.0 - self.decay_factor) * x[[i, k]].powi(2);
327                        }
328                        volatility_features[[i, j]] = ewma_var.sqrt();
329                    }
330                }
331                VolatilityModel::GARCH => {
332                    // Simplified GARCH(1,1) features
333                    for j in 0..feature_dim.min(n_features) {
334                        let alpha = 0.1;
335                        let beta = 0.85;
336                        let omega = 0.05;
337                        let mut variance = x[[i, 0]].powi(2);
338                        for k in 1..n_features {
339                            variance = omega + alpha * x[[i, k - 1]].powi(2) + beta * variance;
340                        }
341                        volatility_features[[i, j]] = variance.sqrt();
342                    }
343                }
344                VolatilityModel::Realized => {
345                    // Realized volatility (sum of squared returns)
346                    for j in 0..feature_dim.min(n_features) {
347                        let mut realized_var = 0.0;
348                        for k in 0..n_features {
349                            realized_var += x[[i, k]].powi(2);
350                        }
351                        volatility_features[[i, j]] = (realized_var / n_features as Float).sqrt();
352                    }
353                }
354            }
355        }
356
357        // Apply random projection
358        let features = volatility_features.dot(projection);
359
360        Ok(features)
361    }
362}
363
364// ============================================================================
365// Econometric Kernel
366// ============================================================================
367
/// Kernel method for econometric time series analysis
///
/// This kernel implements features based on econometric models including
/// autoregressive features, cointegration, and structural breaks.
/// Follows the same Untrained/Trained typestate pattern as the other kernels
/// in this module.
///
/// # References
/// - Hamilton (1994): "Time Series Analysis"
pub struct EconometricKernel<State = Untrained> {
    /// Number of random features produced per sample
    n_components: usize,
    /// AR model order (number of lags used for lag and autocorrelation features)
    ar_order: usize,
    /// Whether to include first-difference features
    include_differences: bool,
    /// Random projection matrix (feature_dim x n_components); `Some` only once trained
    projection: Option<Array2<Float>>,
    /// Zero-sized marker carrying the Untrained/Trained typestate
    _state: PhantomData<State>,
}
387
388impl EconometricKernel<Untrained> {
389    /// Create a new econometric kernel
390    pub fn new(n_components: usize, ar_order: usize) -> Self {
391        Self {
392            n_components,
393            ar_order,
394            include_differences: true,
395            projection: None,
396            _state: PhantomData,
397        }
398    }
399
400    /// Set whether to include difference features
401    pub fn include_differences(mut self, include_differences: bool) -> Self {
402        self.include_differences = include_differences;
403        self
404    }
405}
406
407impl Default for EconometricKernel<Untrained> {
408    fn default() -> Self {
409        Self::new(100, 4)
410    }
411}
412
413impl Fit<Array2<Float>, ()> for EconometricKernel<Untrained> {
414    type Fitted = EconometricKernel<Trained>;
415
416    fn fit(self, x: &Array2<Float>, _y: &()) -> Result<Self::Fitted> {
417        let n_samples = x.nrows();
418        if n_samples == 0 {
419            return Err(SklearsError::InvalidInput(
420                "Input array cannot be empty".to_string(),
421            ));
422        }
423
424        // Feature dimension based on AR order and differences
425        let base_dim = self.ar_order * 2;
426        let diff_dim = if self.include_differences { 10 } else { 0 };
427        let feature_dim = base_dim + diff_dim;
428
429        let mut rng = thread_rng();
430        let normal = Normal::new(0.0, 1.0 / (feature_dim as Float).sqrt()).unwrap();
431
432        // Generate random projection
433        let mut projection = Array2::zeros((feature_dim, self.n_components));
434        for i in 0..feature_dim {
435            for j in 0..self.n_components {
436                projection[[i, j]] = normal.sample(&mut rng);
437            }
438        }
439
440        Ok(EconometricKernel {
441            n_components: self.n_components,
442            ar_order: self.ar_order,
443            include_differences: self.include_differences,
444            projection: Some(projection),
445            _state: PhantomData,
446        })
447    }
448}
449
450impl Transform<Array2<Float>, Array2<Float>> for EconometricKernel<Trained> {
451    fn transform(&self, x: &Array2<Float>) -> Result<Array2<Float>> {
452        let n_samples = x.nrows();
453        let n_features = x.ncols();
454
455        if n_samples == 0 {
456            return Err(SklearsError::InvalidInput(
457                "Input array cannot be empty".to_string(),
458            ));
459        }
460
461        let projection = self.projection.as_ref().unwrap();
462        let feature_dim = projection.nrows();
463
464        // Extract econometric features
465        let mut econ_features = Array2::zeros((n_samples, feature_dim));
466
467        for i in 0..n_samples {
468            let mut feat_idx = 0;
469
470            // AR features (lagged values)
471            for lag in 1..=self.ar_order {
472                if lag < n_features && feat_idx < feature_dim {
473                    econ_features[[i, feat_idx]] = x[[i, n_features - lag - 1]];
474                    feat_idx += 1;
475                }
476            }
477
478            // Autocorrelation approximation
479            for lag in 1..=self.ar_order {
480                if lag < n_features && feat_idx < feature_dim {
481                    let mut autocorr = 0.0;
482                    let count = n_features - lag;
483                    for k in 0..count {
484                        autocorr += x[[i, k]] * x[[i, k + lag]];
485                    }
486                    econ_features[[i, feat_idx]] = autocorr / count as Float;
487                    feat_idx += 1;
488                }
489            }
490
491            // First and second differences
492            if self.include_differences {
493                for j in 0..10.min(n_features - 1) {
494                    if feat_idx < feature_dim {
495                        let diff1 = x[[i, j + 1]] - x[[i, j]];
496                        econ_features[[i, feat_idx]] = diff1;
497                        feat_idx += 1;
498                    }
499                }
500            }
501        }
502
503        // Apply random projection
504        let features = econ_features.dot(projection);
505
506        Ok(features)
507    }
508}
509
510// ============================================================================
511// Portfolio Kernel
512// ============================================================================
513
/// Kernel method for portfolio optimization and analysis
///
/// This kernel extracts portfolio-relevant features including risk-return
/// characteristics, diversification measures, and factor exposures.
/// Follows the same Untrained/Trained typestate pattern as the other kernels
/// in this module.
///
/// # References
/// - Markowitz (1952): "Portfolio Selection"
pub struct PortfolioKernel<State = Untrained> {
    /// Number of random features produced per sample
    n_components: usize,
    /// Number of assets in portfolio
    /// NOTE(review): not read by `fit` or `transform` in this file — confirm intent.
    n_assets: usize,
    /// Risk-free rate subtracted from the mean return in the Sharpe feature
    risk_free_rate: Float,
    /// Random projection matrix (feature_dim x n_components); `Some` only once trained
    projection: Option<Array2<Float>>,
    /// Zero-sized marker carrying the Untrained/Trained typestate
    _state: PhantomData<State>,
}
533
534impl PortfolioKernel<Untrained> {
535    /// Create a new portfolio kernel
536    pub fn new(n_components: usize, n_assets: usize) -> Self {
537        Self {
538            n_components,
539            n_assets,
540            risk_free_rate: 0.02,
541            projection: None,
542            _state: PhantomData,
543        }
544    }
545
546    /// Set the risk-free rate
547    pub fn risk_free_rate(mut self, risk_free_rate: Float) -> Self {
548        self.risk_free_rate = risk_free_rate;
549        self
550    }
551}
552
553impl Default for PortfolioKernel<Untrained> {
554    fn default() -> Self {
555        Self::new(100, 10)
556    }
557}
558
559impl Fit<Array2<Float>, ()> for PortfolioKernel<Untrained> {
560    type Fitted = PortfolioKernel<Trained>;
561
562    fn fit(self, x: &Array2<Float>, _y: &()) -> Result<Self::Fitted> {
563        let n_samples = x.nrows();
564        if n_samples == 0 {
565            return Err(SklearsError::InvalidInput(
566                "Input array cannot be empty".to_string(),
567            ));
568        }
569
570        // Portfolio features: returns, risk, correlations, diversification
571        let feature_dim = 30;
572
573        let mut rng = thread_rng();
574        let normal = Normal::new(0.0, 1.0 / (feature_dim as Float).sqrt()).unwrap();
575
576        // Generate random projection
577        let mut projection = Array2::zeros((feature_dim, self.n_components));
578        for i in 0..feature_dim {
579            for j in 0..self.n_components {
580                projection[[i, j]] = normal.sample(&mut rng);
581            }
582        }
583
584        Ok(PortfolioKernel {
585            n_components: self.n_components,
586            n_assets: self.n_assets,
587            risk_free_rate: self.risk_free_rate,
588            projection: Some(projection),
589            _state: PhantomData,
590        })
591    }
592}
593
impl Transform<Array2<Float>, Array2<Float>> for PortfolioKernel<Trained> {
    /// Map each row (treated as a set of asset returns) to `n_components`
    /// random features: mean return, volatility, Sharpe ratio, an
    /// equal-weight diversification measure and pairwise-product
    /// "correlation" features, then a random projection.
    fn transform(&self, x: &Array2<Float>) -> Result<Array2<Float>> {
        let n_samples = x.nrows();
        let n_features = x.ncols();

        if n_samples == 0 {
            return Err(SklearsError::InvalidInput(
                "Input array cannot be empty".to_string(),
            ));
        }

        // Set by `fit`, so present for any Trained kernel.
        let projection = self.projection.as_ref().unwrap();
        let feature_dim = projection.nrows();

        // Extract portfolio features (unfilled slots stay zero).
        // NOTE(review): if n_features == 0 the divisions below yield NaN
        // features rather than an error — confirm whether that is intended.
        let mut portfolio_features = Array2::zeros((n_samples, feature_dim));

        for i in 0..n_samples {
            let mut feat_idx = 0;

            // Expected return approximation (row mean).
            let mut mean_return = 0.0;
            for j in 0..n_features {
                mean_return += x[[i, j]];
            }
            mean_return /= n_features as Float;
            if feat_idx < feature_dim {
                portfolio_features[[i, feat_idx]] = mean_return;
                feat_idx += 1;
            }

            // Portfolio volatility approximation (population std of the row).
            let mut variance = 0.0;
            for j in 0..n_features {
                variance += (x[[i, j]] - mean_return).powi(2);
            }
            let volatility = (variance / n_features as Float).sqrt();
            if feat_idx < feature_dim {
                portfolio_features[[i, feat_idx]] = volatility;
                feat_idx += 1;
            }

            // Sharpe ratio approximation.
            // NOTE(review): when volatility <= 1e-8 this slot is skipped, so
            // all later features shift left for that row relative to other
            // rows — confirm whether a zero placeholder was intended.
            if volatility > 1e-8 && feat_idx < feature_dim {
                let sharpe = (mean_return - self.risk_free_rate) / volatility;
                portfolio_features[[i, feat_idx]] = sharpe;
                feat_idx += 1;
            }

            // Diversification measure (inverse of concentration).
            // With equal weights the Herfindahl index is exactly
            // 1/n_features; the loop keeps the literal summation.
            let mut herfindahl = 0.0;
            let weight = 1.0 / n_features as Float;
            for _ in 0..n_features {
                herfindahl += weight.powi(2);
            }
            let diversification = 1.0 - herfindahl;
            if feat_idx < feature_dim {
                portfolio_features[[i, feat_idx]] = diversification;
                feat_idx += 1;
            }

            // Correlation-based features (simplified: adjacent-column products).
            for j in 0..10.min(n_features) {
                if j + 1 < n_features && feat_idx < feature_dim {
                    let corr_approx = x[[i, j]] * x[[i, j + 1]];
                    portfolio_features[[i, feat_idx]] = corr_approx;
                    feat_idx += 1;
                }
            }
        }

        // Apply random projection
        let features = portfolio_features.dot(projection);

        Ok(features)
    }
}
671
672// ============================================================================
673// Risk Kernel
674// ============================================================================
675
/// Kernel method for financial risk analysis
///
/// This kernel computes risk-based features including Value-at-Risk (VaR),
/// Conditional VaR (CVaR), and risk contribution measures.
/// Follows the same Untrained/Trained typestate pattern as the other kernels
/// in this module.
///
/// # References
/// - Jorion (2006): "Value at Risk: The New Benchmark for Managing Financial Risk"
pub struct RiskKernel<State = Untrained> {
    /// Number of random features produced per sample
    n_components: usize,
    /// Confidence level for VaR (e.g., 0.95 for 95% VaR)
    /// NOTE(review): `transform` currently hard-codes a 95% z-score and does
    /// not read this field — confirm intent.
    confidence_level: Float,
    /// Whether to include tail risk measures (skewness, excess kurtosis)
    include_tail_risk: bool,
    /// Random projection matrix (feature_dim x n_components); `Some` only once trained
    projection: Option<Array2<Float>>,
    /// Zero-sized marker carrying the Untrained/Trained typestate
    _state: PhantomData<State>,
}
695
696impl RiskKernel<Untrained> {
697    /// Create a new risk kernel
698    pub fn new(n_components: usize, confidence_level: Float) -> Self {
699        Self {
700            n_components,
701            confidence_level,
702            include_tail_risk: true,
703            projection: None,
704            _state: PhantomData,
705        }
706    }
707
708    /// Set whether to include tail risk measures
709    pub fn include_tail_risk(mut self, include_tail_risk: bool) -> Self {
710        self.include_tail_risk = include_tail_risk;
711        self
712    }
713}
714
715impl Default for RiskKernel<Untrained> {
716    fn default() -> Self {
717        Self::new(100, 0.95)
718    }
719}
720
721impl Fit<Array2<Float>, ()> for RiskKernel<Untrained> {
722    type Fitted = RiskKernel<Trained>;
723
724    fn fit(self, x: &Array2<Float>, _y: &()) -> Result<Self::Fitted> {
725        let n_samples = x.nrows();
726        if n_samples == 0 {
727            return Err(SklearsError::InvalidInput(
728                "Input array cannot be empty".to_string(),
729            ));
730        }
731
732        // Risk features dimension
733        let base_dim = 15;
734        let tail_dim = if self.include_tail_risk { 10 } else { 0 };
735        let feature_dim = base_dim + tail_dim;
736
737        let mut rng = thread_rng();
738        let normal = Normal::new(0.0, 1.0 / (feature_dim as Float).sqrt()).unwrap();
739
740        // Generate random projection
741        let mut projection = Array2::zeros((feature_dim, self.n_components));
742        for i in 0..feature_dim {
743            for j in 0..self.n_components {
744                projection[[i, j]] = normal.sample(&mut rng);
745            }
746        }
747
748        Ok(RiskKernel {
749            n_components: self.n_components,
750            confidence_level: self.confidence_level,
751            include_tail_risk: self.include_tail_risk,
752            projection: Some(projection),
753            _state: PhantomData,
754        })
755    }
756}
757
758impl Transform<Array2<Float>, Array2<Float>> for RiskKernel<Trained> {
759    fn transform(&self, x: &Array2<Float>) -> Result<Array2<Float>> {
760        let n_samples = x.nrows();
761        let n_features = x.ncols();
762
763        if n_samples == 0 {
764            return Err(SklearsError::InvalidInput(
765                "Input array cannot be empty".to_string(),
766            ));
767        }
768
769        let projection = self.projection.as_ref().unwrap();
770        let feature_dim = projection.nrows();
771
772        // Extract risk features
773        let mut risk_features = Array2::zeros((n_samples, feature_dim));
774
775        for i in 0..n_samples {
776            let mut feat_idx = 0;
777
778            // VaR approximation using parametric method
779            let mut mean = 0.0;
780            let mut variance = 0.0;
781            for j in 0..n_features {
782                mean += x[[i, j]];
783            }
784            mean /= n_features as Float;
785
786            for j in 0..n_features {
787                variance += (x[[i, j]] - mean).powi(2);
788            }
789            variance /= n_features as Float;
790            let std_dev = variance.sqrt();
791
792            // Normal VaR (parametric)
793            let z_score = 1.645; // Approximate z-score for 95% confidence
794            let var = mean - z_score * std_dev;
795            if feat_idx < feature_dim {
796                risk_features[[i, feat_idx]] = var;
797                feat_idx += 1;
798            }
799
800            // CVaR approximation (expected shortfall)
801            let cvar = mean - (2.5 * std_dev); // Simplified CVaR
802            if feat_idx < feature_dim {
803                risk_features[[i, feat_idx]] = cvar;
804                feat_idx += 1;
805            }
806
807            // Downside deviation
808            let mut downside_var = 0.0;
809            let mut downside_count = 0;
810            for j in 0..n_features {
811                if x[[i, j]] < mean {
812                    downside_var += (x[[i, j]] - mean).powi(2);
813                    downside_count += 1;
814                }
815            }
816            if downside_count > 0 {
817                let downside_dev = (downside_var / downside_count as Float).sqrt();
818                if feat_idx < feature_dim {
819                    risk_features[[i, feat_idx]] = downside_dev;
820                    feat_idx += 1;
821                }
822            }
823
824            // Maximum drawdown approximation
825            let mut max_dd = 0.0;
826            let mut peak = x[[i, 0]];
827            for j in 0..n_features {
828                if x[[i, j]] > peak {
829                    peak = x[[i, j]];
830                }
831                let dd = (peak - x[[i, j]]) / (peak.abs() + 1e-8);
832                if dd > max_dd {
833                    max_dd = dd;
834                }
835            }
836            if feat_idx < feature_dim {
837                risk_features[[i, feat_idx]] = max_dd;
838                feat_idx += 1;
839            }
840
841            // Tail risk measures
842            if self.include_tail_risk {
843                // Skewness approximation
844                let mut skew = 0.0;
845                for j in 0..n_features {
846                    skew += ((x[[i, j]] - mean) / (std_dev + 1e-8)).powi(3);
847                }
848                skew /= n_features as Float;
849                if feat_idx < feature_dim {
850                    risk_features[[i, feat_idx]] = skew;
851                    feat_idx += 1;
852                }
853
854                // Kurtosis approximation
855                let mut kurt = 0.0;
856                for j in 0..n_features {
857                    kurt += ((x[[i, j]] - mean) / (std_dev + 1e-8)).powi(4);
858                }
859                kurt /= n_features as Float;
860                if feat_idx < feature_dim {
861                    risk_features[[i, feat_idx]] = kurt - 3.0; // Excess kurtosis
862                }
863            }
864        }
865
866        // Apply random projection
867        let features = risk_features.dot(projection);
868
869        Ok(features)
870    }
871}
872
873// ============================================================================
874// Tests
875// ============================================================================
876
#[cfg(test)]
mod tests {
    use super::*;
    use scirs2_core::ndarray::array;

    /// Smoke test: FinancialKernel produces the requested number of
    /// random-feature components for each input row.
    #[test]
    fn test_financial_kernel_basic() {
        let x = array![[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0]];

        let kernel = FinancialKernel::new(50, 10);
        let fitted = kernel.fit(&x, &()).unwrap();
        let features = fitted.transform(&x).unwrap();

        assert_eq!(features.shape(), &[2, 50]);
    }

    /// Enabling volatility features must not change the output shape —
    /// extra raw features are absorbed by the random projection.
    #[test]
    fn test_financial_kernel_volatility() {
        let x = array![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]];

        let kernel = FinancialKernel::new(40, 5).include_volatility(true);
        let fitted = kernel.fit(&x, &()).unwrap();
        let features = fitted.transform(&x).unwrap();

        assert_eq!(features.shape(), &[2, 40]);
    }

    /// VolatilityKernel with the EWMA model and a RiskMetrics-style
    /// decay factor (0.94) yields the configured feature dimension.
    #[test]
    fn test_volatility_kernel_ewma() {
        let x = array![[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0]];

        let kernel = VolatilityKernel::new(50, VolatilityModel::EWMA).decay_factor(0.94);
        let fitted = kernel.fit(&x, &()).unwrap();
        let features = fitted.transform(&x).unwrap();

        assert_eq!(features.shape(), &[2, 50]);
    }

    /// Every supported volatility model fits and transforms without
    /// error and produces the same output shape.
    #[test]
    fn test_volatility_models() {
        let x = array![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]];

        // Plain array: no heap allocation needed just to iterate
        // (clippy::useless_vec).
        let models = [
            VolatilityModel::Historical,
            VolatilityModel::EWMA,
            VolatilityModel::GARCH,
            VolatilityModel::Realized,
        ];

        for model in models {
            let kernel = VolatilityKernel::new(30, model);
            let fitted = kernel.fit(&x, &()).unwrap();
            let features = fitted.transform(&x).unwrap();
            assert_eq!(features.shape(), &[2, 30]);
        }
    }

    /// Smoke test: EconometricKernel with 3 lags on 5-feature rows.
    #[test]
    fn test_econometric_kernel_basic() {
        let x = array![[1.0, 2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0, 10.0]];

        let kernel = EconometricKernel::new(50, 3);
        let fitted = kernel.fit(&x, &()).unwrap();
        let features = fitted.transform(&x).unwrap();

        assert_eq!(features.shape(), &[2, 50]);
    }

    /// Enabling first-difference features keeps the projected output
    /// shape fixed at `n_components`.
    #[test]
    fn test_econometric_kernel_differences() {
        let x = array![[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0]];

        let kernel = EconometricKernel::new(40, 2).include_differences(true);
        let fitted = kernel.fit(&x, &()).unwrap();
        let features = fitted.transform(&x).unwrap();

        assert_eq!(features.shape(), &[2, 40]);
    }

    /// Smoke test: PortfolioKernel over 4 assets.
    #[test]
    fn test_portfolio_kernel_basic() {
        let x = array![[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0]];

        let kernel = PortfolioKernel::new(50, 4);
        let fitted = kernel.fit(&x, &()).unwrap();
        let features = fitted.transform(&x).unwrap();

        assert_eq!(features.shape(), &[2, 50]);
    }

    /// A non-zero risk-free rate (used by Sharpe-style features) must
    /// not affect the output dimensionality.
    #[test]
    fn test_portfolio_kernel_risk_free_rate() {
        let x = array![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]];

        let kernel = PortfolioKernel::new(40, 3).risk_free_rate(0.03);
        let fitted = kernel.fit(&x, &()).unwrap();
        let features = fitted.transform(&x).unwrap();

        assert_eq!(features.shape(), &[2, 40]);
    }

    /// Smoke test: RiskKernel at the 95% confidence level.
    #[test]
    fn test_risk_kernel_basic() {
        let x = array![[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0]];

        let kernel = RiskKernel::new(50, 0.95);
        let fitted = kernel.fit(&x, &()).unwrap();
        let features = fitted.transform(&x).unwrap();

        assert_eq!(features.shape(), &[2, 50]);
    }

    /// Tail-risk features (skewness/kurtosis) at the 99% level still
    /// project down to the configured number of components.
    #[test]
    fn test_risk_kernel_tail_risk() {
        let x = array![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]];

        let kernel = RiskKernel::new(40, 0.99).include_tail_risk(true);
        let fitted = kernel.fit(&x, &()).unwrap();
        let features = fitted.transform(&x).unwrap();

        assert_eq!(features.shape(), &[2, 40]);
    }

    /// Fitting on a zero-row matrix must be rejected with an error
    /// rather than panicking or producing empty features.
    #[test]
    fn test_empty_input_error() {
        let x_empty: Array2<Float> = Array2::zeros((0, 3));

        let kernel = FinancialKernel::new(50, 10);
        assert!(kernel.fit(&x_empty, &()).is_err());

        let kernel2 = VolatilityKernel::new(50, VolatilityModel::EWMA);
        assert!(kernel2.fit(&x_empty, &()).is_err());
    }
}