Skip to main content

quantwave_core/indicators/
pma.rs

1use crate::indicators::metadata::{IndicatorMetadata, ParamDef};
2use crate::traits::Next;
3use std::collections::VecDeque;
4
/// Projected Moving Average (PMA)
///
/// Based on John Ehlers' "Removing Moving Average Lag" (TASC March 2025).
/// Adds the linear regression slope multiplied by half the length to a simple moving average
/// to compensate for the lag inherent in moving averages.
/// Returns (PMA, Predict).
#[derive(Debug, Clone)]
pub struct ProjectedMovingAverage {
    /// Lookback length `n` shared by the SMA and the regression fit.
    length: usize,
    /// Price window, newest sample at the front; capped at `length` entries.
    window: VecDeque<f64>,
    /// Last three slope values, newest first; index 2 is the slope two bars ago.
    slope_history: VecDeque<f64>,
    /// Precomputed Σx for x = 1..=length (regression abscissa sum).
    sum_x: f64,
    /// Precomputed Σx² for x = 1..=length.
    sum_x2: f64,
}
19
20impl ProjectedMovingAverage {
21    pub fn new(length: usize) -> Self {
22        let mut sum_x = 0.0;
23        let mut sum_x2 = 0.0;
24        for i in 1..=length {
25            let x = i as f64;
26            sum_x += x;
27            sum_x2 += x * x;
28        }
29        Self {
30            length,
31            window: VecDeque::with_capacity(length),
32            slope_history: VecDeque::from(vec![0.0; 3]),
33            sum_x,
34            sum_x2,
35        }
36    }
37}
38
// Default to a 20-bar length, the setting used in Ehlers' TASC article
// (and mirrored by the metadata `ParamDef` default below).
impl Default for ProjectedMovingAverage {
    fn default() -> Self {
        Self::new(20)
    }
}
44
45impl Next<f64> for ProjectedMovingAverage {
46    type Output = (f64, f64);
47
48    fn next(&mut self, input: f64) -> Self::Output {
49        self.window.push_front(input);
50        if self.window.len() > self.length {
51            self.window.pop_back();
52        }
53
54        if self.window.len() < self.length {
55            return (input, input);
56        }
57
58        let mut sum_y = 0.0;
59        let mut sum_xy = 0.0;
60
61        for i in 0..self.length {
62            let y = self.window[i];
63            let x = (i + 1) as f64;
64            sum_y += y;
65            sum_xy += x * y;
66        }
67
68        let n = self.length as f64;
69        let denom = n * self.sum_x2 - self.sum_x * self.sum_x;
70        let slope = if denom != 0.0 {
71            -(n * sum_xy - self.sum_x * sum_y) / denom
72        } else {
73            0.0
74        };
75        let sma = sum_y / n;
76        let pma = sma + slope * n / 2.0;
77
78        self.slope_history.pop_back();
79        self.slope_history.push_front(slope);
80
81        let predict = pma + 0.5 * (slope - self.slope_history[2]) * n;
82
83        (pma, predict)
84    }
85}
86
/// Registry metadata for the PMA indicator: display name, tunable
/// parameters, formula provenance, and the gold-standard fixture file
/// used by parity tests.
pub const PROJECTED_MOVING_AVERAGE_METADATA: IndicatorMetadata = IndicatorMetadata {
    name: "Projected Moving Average",
    description: "A lag-compensated moving average that uses linear regression slope to project the average forward.",
    params: &[ParamDef {
        name: "length",
        default: "20",
        description: "Calculation length",
    }],
    formula_source: "https://github.com/lavs9/quantwave/blob/main/references/traderstipsreference/TRADERS’%20TIPS%20-%20MARCH%202025.html",
    formula_latex: r#"
\[
Slope = -\frac{n \sum xy - \sum x \sum y}{n \sum x^2 - (\sum x)^2}
\]
\[
PMA = SMA + Slope \cdot \frac{n}{2}
\]
\[
Predict = PMA + 0.5 \cdot (Slope - Slope_{t-2}) \cdot n
\]
"#,
    gold_standard_file: "pma.json",
    category: "Ehlers DSP",
};
110
#[cfg(test)]
mod tests {
    use super::*;
    use crate::traits::Next;
    use proptest::prelude::*;

    // A constant series has zero slope, so PMA and Predict both equal the
    // input — during warm-up and after.
    #[test]
    fn test_pma_basic() {
        let mut indicator = ProjectedMovingAverage::new(20);
        for _ in 0..40 {
            let (pma, predict) = indicator.next(10.0);
            assert_eq!(pma, 10.0);
            assert_eq!(predict, 10.0);
        }
    }

    proptest! {
        // Streaming output must match an independent from-scratch batch
        // recomputation at every bar.
        #[test]
        fn test_pma_parity(
            inputs in prop::collection::vec(1.0..100.0, 40..100),
        ) {
            let length = 20;
            let mut indicator = ProjectedMovingAverage::new(length);
            let streaming: Vec<(f64, f64)> =
                inputs.iter().map(|&v| indicator.next(v)).collect();

            // Reference batch implementation.
            let n = length as f64;
            let sum_x: f64 = (1..=length).map(|i| i as f64).sum();
            let sum_x2: f64 = (1..=length).map(|i| (i * i) as f64).sum();

            let mut slopes = vec![0.0; 3];
            let mut batch = Vec::with_capacity(inputs.len());
            for bar in 0..inputs.len() {
                if bar < length - 1 {
                    // Warm-up: streaming passes the input through.
                    batch.push((inputs[bar], inputs[bar]));
                    continue;
                }

                // Regression sums over the most recent `length` bars,
                // x = 1 at the newest.
                let mut sum_y = 0.0;
                let mut sum_xy = 0.0;
                for back in 0..length {
                    let y = inputs[bar - back];
                    sum_y += y;
                    sum_xy += (back + 1) as f64 * y;
                }

                let denom = n * sum_x2 - sum_x * sum_x;
                let slope = if denom != 0.0 {
                    -(n * sum_xy - sum_x * sum_y) / denom
                } else {
                    0.0
                };
                let pma_val = sum_y / n + slope * n / 2.0;

                // Keep only the three most recent slopes, newest first.
                slopes.insert(0, slope);
                slopes.truncate(3);

                batch.push((pma_val, pma_val + 0.5 * (slope - slopes[2]) * n));
            }

            for (s, b) in streaming.iter().zip(batch.iter()) {
                approx::assert_relative_eq!(s.0, b.0, epsilon = 1e-10);
                approx::assert_relative_eq!(s.1, b.1, epsilon = 1e-10);
            }
        }
    }
}