ruvector_memopt/core/optimizer.rs

use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;
use tracing::{debug, error, info, warn};

use super::config::OptimizerConfig;
use super::patterns::MemoryPattern;
use super::process_scorer::ProcessScorer;
use crate::neural::engine::NeuralDecisionEngine;
use crate::windows::memory::{MemoryStatus, OptimizationResult, WindowsMemoryOptimizer};
use crate::bench::metrics::{BenchmarkMetrics, OptimizationMetrics};

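/// Outcome of a single evaluation pass: whether to optimize, how aggressively,
/// the confidence behind the call, a human-readable reason, and the process IDs to target.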
#[derive(Debug, Clone)]
pub struct OptimizationDecision {
    pub should_optimize: bool,
    pub aggressive: bool,
    pub confidence: f32,
    pub reason: String,
    pub target_processes: Vec<u32>,
}

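/// Ties the pieces together: polls Windows memory status, decides whether to act
/// (neural engine when available, rule-based thresholds otherwise), applies the
/// optimization, and records metrics for benchmarking.
///
/// A minimal usage sketch, assuming `OptimizerConfig` implements `Default`
/// (not defined in this file):
///
/// ```ignore
/// let mut opt = IntelligentOptimizer::new(OptimizerConfig::default());
/// opt.startup_optimize().await?;
/// opt.run_loop(Duration::from_secs(60)).await;
/// ```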
pub struct IntelligentOptimizer {
    config: OptimizerConfig,
    windows_opt: WindowsMemoryOptimizer,
    neural_engine: Option<Arc<RwLock<NeuralDecisionEngine>>>,
    process_scorer: ProcessScorer,
    last_optimization: Option<Instant>,
    metrics: BenchmarkMetrics,
}

impl IntelligentOptimizer {
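    /// Builds an optimizer from the given config, initializing the neural engine
    /// only when `neural_enabled` is set; on failure it logs a warning and falls
    /// back to rule-based decisions.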
    pub fn new(config: OptimizerConfig) -> Self {
        let windows_opt = WindowsMemoryOptimizer::new();

        let neural_engine = if config.neural_enabled {
            match NeuralDecisionEngine::new(&config) {
                Ok(engine) => Some(Arc::new(RwLock::new(engine))),
                Err(e) => {
                    warn!("Failed to initialize neural engine: {}. Using rule-based fallback.", e);
                    None
                }
            }
        } else {
            None
        };

        Self {
            config,
            windows_opt,
            neural_engine,
            process_scorer: ProcessScorer::new(),
            last_optimization: None,
            metrics: BenchmarkMetrics::new(),
        }
    }

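    /// Evaluates the current memory status and returns a decision. Declines to act
    /// while the `min_interval_secs` cooldown is still running; otherwise defers to
    /// the neural engine when present, falling back to rule-based thresholds.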
    pub async fn evaluate(&self) -> Result<OptimizationDecision, String> {
        let status = WindowsMemoryOptimizer::get_memory_status()?;
        let pattern = MemoryPattern::from_status(&status);

        if let Some(last) = self.last_optimization {
            let elapsed = last.elapsed().as_secs();
            if elapsed < self.config.min_interval_secs {
                return Ok(OptimizationDecision {
                    should_optimize: false,
                    aggressive: false,
                    confidence: 1.0,
                    reason: format!("Cooldown: {}s remaining",
                        self.config.min_interval_secs - elapsed),
                    target_processes: vec![],
                });
            }
        }

        if let Some(ref engine) = self.neural_engine {
            let engine = engine.read().await;
            return engine.decide(&pattern, &status).await;
        }

        self.rule_based_decision(&status)
    }

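    /// Rule-based fallback: aggressive optimization at or above the critical
    /// threshold, targeted process trimming at or above the pressure threshold,
    /// and no action otherwise.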
    fn rule_based_decision(&self, status: &MemoryStatus) -> Result<OptimizationDecision, String> {
        let load = status.memory_load_percent;

        if load >= self.config.critical_threshold {
            Ok(OptimizationDecision {
                should_optimize: true,
                aggressive: true,
                confidence: 0.95,
                reason: format!("Critical memory pressure: {}%", load),
                target_processes: vec![],
            })
        } else if load >= self.config.pressure_threshold {
            let targets = self.process_scorer.get_trim_candidates(10);

            Ok(OptimizationDecision {
                should_optimize: true,
                aggressive: false,
                confidence: 0.8,
                reason: format!("High memory pressure: {}%", load),
                target_processes: targets,
            })
        } else {
            Ok(OptimizationDecision {
                should_optimize: false,
                aggressive: false,
                confidence: 0.9,
                reason: format!("Memory OK: {}%", load),
                target_processes: vec![],
            })
        }
    }

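    /// Carries out a decision: runs the Windows-level optimization, records metrics,
    /// and, when learning is enabled, feeds the outcome back to the neural engine
    /// (freeing more than 100 MB is treated as a success signal).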
    pub async fn optimize(&mut self, decision: &OptimizationDecision) -> Result<OptimizationResult, String> {
        if !decision.should_optimize {
            return Err("Optimization not recommended".into());
        }

        let start = Instant::now();
        info!("Starting optimization (aggressive={}): {}",
            decision.aggressive, decision.reason);

        let result = self.windows_opt.optimize(decision.aggressive)?;

        let opt_metrics = OptimizationMetrics {
            freed_mb: result.freed_mb,
            processes_trimmed: result.processes_trimmed,
            duration_ms: result.duration_ms,
            aggressive: decision.aggressive,
            confidence: decision.confidence,
        };
        self.metrics.record_optimization(&opt_metrics);

        if self.config.learning_enabled {
            if let Some(ref engine) = self.neural_engine {
                let mut engine = engine.write().await;
                let success = result.freed_mb > 100.0;
                engine.learn_from_result(decision, &result, success).await;
            }
        }

        self.last_optimization = Some(Instant::now());

        info!("Optimization complete: freed {:.1} MB in {}ms",
            result.freed_mb, start.elapsed().as_millis());

        Ok(result)
    }

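    /// One-shot optimization shortly after startup. Waits a few seconds for the
    /// system to settle, then optimizes aggressively only when running with
    /// administrator privileges.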
    pub async fn startup_optimize(&mut self) -> Result<OptimizationResult, String> {
        info!("Running startup optimization mode");

        tokio::time::sleep(Duration::from_secs(5)).await;

        let decision = OptimizationDecision {
            should_optimize: true,
            aggressive: self.windows_opt.has_admin_privileges(),
            confidence: 1.0,
            reason: "Startup optimization".into(),
            target_processes: vec![],
        };

        self.optimize(&decision).await
    }

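    /// Runs the evaluate/optimize cycle forever at the given interval, logging
    /// evaluation and optimization failures instead of propagating them.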
    pub async fn run_loop(&mut self, interval: Duration) -> ! {
        info!("Starting optimization loop (interval: {:?})", interval);

        loop {
            match self.evaluate().await {
                Ok(decision) => {
                    if decision.should_optimize {
                        if let Err(e) = self.optimize(&decision).await {
                            error!("Optimization failed: {}", e);
                        }
                    } else {
                        debug!("Skipping: {}", decision.reason);
                    }
                }
                Err(e) => {
                    error!("Evaluation failed: {}", e);
                }
            }

            tokio::time::sleep(interval).await;
        }
    }

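    /// Returns the benchmark metrics collected across optimization runs.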
    pub fn get_metrics(&self) -> &BenchmarkMetrics {
        &self.metrics
    }

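    /// Returns true when the neural decision engine was successfully initialized.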
    pub fn has_neural_engine(&self) -> bool {
        self.neural_engine.is_some()
    }
}