1use crate::code_tools::ast_agent_tools::ASTAgentTools;
11use crate::code_tools::ast_agent_tools::AgentToolOp;
12use crate::code_tools::ast_agent_tools::AgentToolResult;
13use crate::modes::OperatingMode;
14use crate::subagents::AgentResult;
15use crate::subagents::AgentStatus;
16use crate::subagents::Finding;
17use crate::subagents::Severity;
18use crate::subagents::Subagent;
19use crate::subagents::SubagentContext;
20use crate::subagents::SubagentError;
21use crate::subagents::SubagentResult;
22use std::collections::HashMap;
23use std::future::Future;
24use std::path::Path;
25use std::path::PathBuf;
26use std::pin::Pin;
27use std::sync::Arc;
28use std::sync::atomic::AtomicBool;
29use std::sync::atomic::Ordering;
30use std::time::Duration;
31use std::time::SystemTime;
32
/// Subagent that scans source files for performance problems (algorithmic
/// complexity, inefficient queries, memory issues, blocking I/O) using the
/// AST pattern-matching tools.
#[derive(Debug)]
pub struct PerformanceAgent {
    // Agent identifier ("performance"); exposed via `Subagent::name`.
    name: String,
    // Human-readable summary; exposed via `Subagent::description`.
    description: String,
    // Requested operating mode; underscore-prefixed because it is not read anywhere in this file.
    _mode_override: Option<OperatingMode>,
    // Tool names this agent would request; currently unused (underscore-prefixed).
    _tool_permissions: Vec<String>,
    // System-prompt text for an LLM-driven run; currently unused (underscore-prefixed).
    _prompt_template: String,
    // Selects which analysis passes `execute` runs per file.
    optimization_level: OptimizationLevel,
}
43
/// How aggressively [`PerformanceAgent`] analyzes each file.
///
/// Each level runs every analysis pass of the level below it, plus more:
/// `Safe` runs complexity analysis only, `Balanced` adds query and memory
/// analysis, and `Aggressive` additionally runs concurrency analysis.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OptimizationLevel {
    /// Complexity analysis only.
    Safe,
    /// Complexity, query, and memory analysis.
    Balanced,
    /// All passes, including concurrency checks.
    Aggressive,
}
51
52impl Default for PerformanceAgent {
53 fn default() -> Self {
54 Self::new()
55 }
56}
57
impl PerformanceAgent {
    /// Builds the agent with its default configuration: `Balanced`
    /// optimization level, review-mode override, and the standard tool set.
    pub fn new() -> Self {
        Self {
            name: "performance".to_string(),
            description: "Identifies and optimizes performance bottlenecks".to_string(),
            _mode_override: Some(OperatingMode::Review),
            _tool_permissions: vec![
                "search".to_string(),
                "tree".to_string(),
                "grep".to_string(),
                "edit".to_string(),
                "think".to_string(),
            ],
            // Raw string is part of the agent's (currently unused) prompt;
            // its exact text is preserved verbatim.
            _prompt_template: r#"
You are a performance optimization expert focused on:
- Algorithm complexity reduction (O(n²) → O(n log n))
- Memory usage optimization
- Cache efficiency improvements
- Query optimization (N+1, indexes, etc.)
- Concurrency and parallelization
- I/O optimization

Always measure before and after optimization.
Consider trade-offs between time and space complexity.
"#
            .to_string(),
            optimization_level: OptimizationLevel::Balanced,
        }
    }

    /// Builder-style setter for the optimization level; consumes and returns `self`.
    pub const fn with_optimization_level(mut self, level: OptimizationLevel) -> Self {
        self.optimization_level = level;
        self
    }

    /// Scans `file` for algorithmic/memory complexity issues: nested loops
    /// (reported only when the measured nesting depth is >= 2) and string
    /// concatenation inside loops. Tool failures are silently skipped
    /// (`if let Ok`), so a failed pattern search simply yields no findings.
    async fn analyze_complexity(&self, ast_tools: &mut ASTAgentTools, file: &Path) -> Vec<Finding> {
        let mut findings = Vec::new();

        // Nested loops → potential O(n^k) hot spots.
        if let Ok(AgentToolResult::Patterns(patterns)) =
            ast_tools.execute(AgentToolOp::FindPatterns {
                pattern_type: crate::code_tools::ast_agent_tools::PatternType::NestedLoop,
                scope: crate::code_tools::search::SearchScope::Files(vec![file.to_path_buf()]),
            })
        {
            for pattern in patterns {
                // Let-chain: only report when the follow-up loop analysis
                // succeeds AND the nesting depth is at least 2.
                if let Ok(AgentToolResult::LoopAnalysis(analysis)) =
                    ast_tools.execute(AgentToolOp::AnalyzeLoop {
                        location: pattern.location.clone(),
                    })
                    && analysis.nesting_depth >= 2
                {
                    let complexity = match analysis.nesting_depth {
                        2 => "O(n²)",
                        3 => "O(n³)",
                        _ => "O(n^k) where k > 3",
                    };

                    findings.push(Finding {
                        category: "algorithm-complexity".to_string(),
                        // Depth 3+ is treated as critical, depth 2 as high.
                        severity: if analysis.nesting_depth > 2 {
                            Severity::Critical
                        } else {
                            Severity::High
                        },
                        title: format!("High Algorithmic Complexity: {}", complexity),
                        description: format!(
                            "Found {}-level nested loops at {}:{}. This results in {} time complexity.",
                            analysis.nesting_depth,
                            pattern.location.file.display(),
                            pattern.location.line,
                            complexity
                        ),
                        location: Some(pattern.location),
                        suggestion: Some(
                            "Consider using more efficient algorithms like hash maps, sorting, or divide-and-conquer".to_string()
                        ),
                        metadata: HashMap::from([
                            ("complexity".to_string(), serde_json::json!(complexity)),
                            ("nesting_depth".to_string(), serde_json::json!(analysis.nesting_depth)),
                        ]),
                    });
                }
            }
        }

        // String concatenation inside a loop → repeated reallocation.
        if let Ok(AgentToolResult::Patterns(patterns)) =
            ast_tools.execute(AgentToolOp::FindPatterns {
                pattern_type:
                    crate::code_tools::ast_agent_tools::PatternType::StringConcatenationInLoop,
                scope: crate::code_tools::search::SearchScope::Files(vec![file.to_path_buf()]),
            })
        {
            for pattern in patterns {
                findings.push(Finding {
                    category: "memory-performance".to_string(),
                    severity: Severity::Medium,
                    title: "Inefficient String Concatenation".to_string(),
                    description: format!(
                        "String concatenation in loop at {}:{}. This creates O(n²) memory allocations.",
                        pattern.location.file.display(),
                        pattern.location.line
                    ),
                    location: Some(pattern.location),
                    suggestion: Some("Use StringBuilder, StringBuffer, or join operations instead".to_string()),
                    metadata: HashMap::from([
                        ("issue_type".to_string(), serde_json::json!("string_concatenation")),
                    ]),
                });
            }
        }

        findings
    }

    /// Scans `file` for database access problems: N+1 query patterns (high
    /// severity) and queries that appear to lack index support (medium).
    async fn analyze_queries(&self, ast_tools: &mut ASTAgentTools, file: &Path) -> Vec<Finding> {
        let mut findings = Vec::new();

        // N+1: one query per collection element instead of a batched fetch.
        if let Ok(AgentToolResult::Patterns(patterns)) =
            ast_tools.execute(AgentToolOp::FindPatterns {
                pattern_type: crate::code_tools::ast_agent_tools::PatternType::NPlusOneQuery,
                scope: crate::code_tools::search::SearchScope::Files(vec![file.to_path_buf()]),
            })
        {
            for pattern in patterns {
                findings.push(Finding {
                    category: "database-performance".to_string(),
                    severity: Severity::High,
                    title: "N+1 Query Problem".to_string(),
                    description: format!(
                        "N+1 query pattern detected at {}:{}. This causes exponential database queries.",
                        pattern.location.file.display(),
                        pattern.location.line
                    ),
                    location: Some(pattern.location),
                    suggestion: Some(
                        "Use eager loading (includes/joins) or batch loading to fetch related data efficiently".to_string()
                    ),
                    metadata: HashMap::from([
                        ("issue_type".to_string(), serde_json::json!("n_plus_one")),
                    ]),
                });
            }
        }

        // Queries without an index hint → possible full table scans.
        if let Ok(AgentToolResult::Patterns(patterns)) =
            ast_tools.execute(AgentToolOp::FindPatterns {
                pattern_type: crate::code_tools::ast_agent_tools::PatternType::UnindexedQuery,
                scope: crate::code_tools::search::SearchScope::Files(vec![file.to_path_buf()]),
            })
        {
            for pattern in patterns {
                findings.push(Finding {
                    category: "database-performance".to_string(),
                    severity: Severity::Medium,
                    title: "Potentially Missing Index".to_string(),
                    description: format!(
                        "Query without index hint at {}:{}. This may cause full table scans.",
                        pattern.location.file.display(),
                        pattern.location.line
                    ),
                    location: Some(pattern.location),
                    suggestion: Some(
                        "Add appropriate database indexes for frequently queried columns"
                            .to_string(),
                    ),
                    metadata: HashMap::from([(
                        "issue_type".to_string(),
                        serde_json::json!("missing_index"),
                    )]),
                });
            }
        }

        findings
    }

    /// Scans `file` for memory problems: large allocations (medium severity)
    /// and potential memory leaks (critical).
    async fn analyze_memory(&self, ast_tools: &mut ASTAgentTools, file: &Path) -> Vec<Finding> {
        let mut findings = Vec::new();

        // Large one-shot allocations → candidates for streaming/chunking.
        if let Ok(AgentToolResult::Patterns(patterns)) =
            ast_tools.execute(AgentToolOp::FindPatterns {
                pattern_type: crate::code_tools::ast_agent_tools::PatternType::LargeAllocation,
                scope: crate::code_tools::search::SearchScope::Files(vec![file.to_path_buf()]),
            })
        {
            for pattern in patterns {
                findings.push(Finding {
                    category: "memory-performance".to_string(),
                    severity: Severity::Medium,
                    title: "Large Memory Allocation".to_string(),
                    description: format!(
                        "Large memory allocation at {}:{}. Consider streaming or chunking.",
                        pattern.location.file.display(),
                        pattern.location.line
                    ),
                    location: Some(pattern.location),
                    suggestion: Some(
                        "Use streaming, pagination, or lazy loading for large data sets"
                            .to_string(),
                    ),
                    metadata: HashMap::from([(
                        "issue_type".to_string(),
                        serde_json::json!("large_allocation"),
                    )]),
                });
            }
        }

        // Resources that are never released → leaks; reported as critical.
        if let Ok(AgentToolResult::Patterns(patterns)) =
            ast_tools.execute(AgentToolOp::FindPatterns {
                pattern_type: crate::code_tools::ast_agent_tools::PatternType::MemoryLeak,
                scope: crate::code_tools::search::SearchScope::Files(vec![file.to_path_buf()]),
            })
        {
            for pattern in patterns {
                findings.push(Finding {
                    category: "memory-performance".to_string(),
                    severity: Severity::Critical,
                    title: "Potential Memory Leak".to_string(),
                    description: format!(
                        "Potential memory leak at {}:{}. Resources not properly released.",
                        pattern.location.file.display(),
                        pattern.location.line
                    ),
                    location: Some(pattern.location),
                    suggestion: Some(
                        "Ensure all resources are properly freed using RAII or try-finally blocks"
                            .to_string(),
                    ),
                    metadata: HashMap::from([(
                        "issue_type".to_string(),
                        serde_json::json!("memory_leak"),
                    )]),
                });
            }
        }

        findings
    }

    /// Scans `file` for concurrency-related throughput issues; currently a
    /// single pass that flags blocking I/O operations (medium severity).
    /// Only run at the `Aggressive` optimization level.
    async fn analyze_concurrency(
        &self,
        ast_tools: &mut ASTAgentTools,
        file: &Path,
    ) -> Vec<Finding> {
        let mut findings = Vec::new();

        if let Ok(AgentToolResult::Patterns(patterns)) =
            ast_tools.execute(AgentToolOp::FindPatterns {
                pattern_type: crate::code_tools::ast_agent_tools::PatternType::BlockingIO,
                scope: crate::code_tools::search::SearchScope::Files(vec![file.to_path_buf()]),
            })
        {
            for pattern in patterns {
                findings.push(Finding {
                    category: "concurrency-performance".to_string(),
                    severity: Severity::Medium,
                    title: "Blocking I/O Operation".to_string(),
                    description: format!(
                        "Blocking I/O at {}:{}. This reduces throughput and scalability.",
                        pattern.location.file.display(),
                        pattern.location.line
                    ),
                    location: Some(pattern.location),
                    suggestion: Some(
                        "Use async/await or non-blocking I/O for better concurrency".to_string(),
                    ),
                    metadata: HashMap::from([(
                        "issue_type".to_string(),
                        serde_json::json!("blocking_io"),
                    )]),
                });
            }
        }

        findings
    }
}
350
351impl Subagent for PerformanceAgent {
352 fn name(&self) -> &str {
353 &self.name
354 }
355
356 fn description(&self) -> &str {
357 &self.description
358 }
359
360 fn execute<'a>(
361 &'a self,
362 context: &'a SubagentContext,
363 ast_tools: &'a mut ASTAgentTools,
364 cancel_flag: Arc<AtomicBool>,
365 ) -> Pin<Box<dyn Future<Output = SubagentResult<AgentResult>> + Send + 'a>> {
366 Box::pin(async move {
367 let start_time = SystemTime::now();
368 let mut all_findings = Vec::new();
369 let mut analyzed_files = Vec::new();
370
371 let files = self.get_performance_targets(context)?;
373
374 for file in &files {
375 if cancel_flag.load(Ordering::Acquire) {
376 return Err(SubagentError::ExecutionFailed(
377 "Performance analysis cancelled".to_string(),
378 ));
379 }
380
381 analyzed_files.push(file.clone());
382
383 match self.optimization_level {
384 OptimizationLevel::Safe => {
385 let complexity_findings = self.analyze_complexity(ast_tools, file).await;
387 all_findings.extend(complexity_findings);
388 }
389 OptimizationLevel::Balanced => {
390 let complexity_findings = self.analyze_complexity(ast_tools, file).await;
392 let query_findings = self.analyze_queries(ast_tools, file).await;
393 let memory_findings = self.analyze_memory(ast_tools, file).await;
394 all_findings.extend(complexity_findings);
395 all_findings.extend(query_findings);
396 all_findings.extend(memory_findings);
397 }
398 OptimizationLevel::Aggressive => {
399 let complexity_findings = self.analyze_complexity(ast_tools, file).await;
401 let query_findings = self.analyze_queries(ast_tools, file).await;
402 let memory_findings = self.analyze_memory(ast_tools, file).await;
403 let concurrency_findings = self.analyze_concurrency(ast_tools, file).await;
404 all_findings.extend(complexity_findings);
405 all_findings.extend(query_findings);
406 all_findings.extend(memory_findings);
407 all_findings.extend(concurrency_findings);
408 }
409 }
410 }
411
412 all_findings.sort_by(|a, b| a.severity.cmp(&b.severity));
414
415 let critical_count = all_findings
416 .iter()
417 .filter(|f| f.severity == Severity::Critical)
418 .count();
419 let high_count = all_findings
420 .iter()
421 .filter(|f| f.severity == Severity::High)
422 .count();
423
424 let summary = format!(
425 "Performance analysis completed: {} files analyzed, {} issues found (Critical: {}, High: {})",
426 analyzed_files.len(),
427 all_findings.len(),
428 critical_count,
429 high_count
430 );
431
432 let performance_issues = all_findings.len();
434
435 let execution_time = SystemTime::now()
436 .duration_since(start_time)
437 .unwrap_or_else(|_| Duration::from_secs(0));
438
439 Ok(AgentResult {
440 agent_name: self.name.clone(),
441 status: AgentStatus::Completed,
442 findings: all_findings,
443 analyzed_files,
444 modified_files: Vec::new(),
445 execution_time,
446 summary,
447 metrics: HashMap::from([
448 (
449 "performance_issues".to_string(),
450 serde_json::json!(performance_issues),
451 ),
452 (
453 "critical_issues".to_string(),
454 serde_json::json!(critical_count),
455 ),
456 (
457 "optimization_level".to_string(),
458 serde_json::json!(format!("{:?}", self.optimization_level)),
459 ),
460 ]),
461 })
462 })
463 }
464
465 fn capabilities(&self) -> Vec<String> {
466 vec![
467 "complexity-analysis".to_string(),
468 "query-optimization".to_string(),
469 "memory-profiling".to_string(),
470 "concurrency-analysis".to_string(),
471 "cache-optimization".to_string(),
472 ]
473 }
474
475 fn supports_file_type(&self, file_path: &Path) -> bool {
476 let supported = [
477 "rs", "py", "js", "ts", "go", "java", "cpp", "c", "rb", "scala",
478 ];
479 file_path
480 .extension()
481 .and_then(|ext| ext.to_str())
482 .map(|ext| supported.contains(&ext))
483 .unwrap_or(false)
484 }
485
486 fn execution_time_estimate(&self) -> Duration {
487 match self.optimization_level {
488 OptimizationLevel::Safe => Duration::from_secs(45),
489 OptimizationLevel::Balanced => Duration::from_secs(90),
490 OptimizationLevel::Aggressive => Duration::from_secs(150),
491 }
492 }
493}
494
495impl PerformanceAgent {
496 fn get_performance_targets(
497 &self,
498 context: &SubagentContext,
499 ) -> Result<Vec<PathBuf>, SubagentError> {
500 if let Some(files) = context.parameters.get("files") {
501 Ok(files.split(',').map(|s| PathBuf::from(s.trim())).collect())
502 } else {
503 Ok(vec![context.working_directory.clone()])
504 }
505 }
506}