use std::path::{Path, PathBuf};
use std::time::{Duration, Instant};
use tokio::sync::mpsc;
use tracing::{debug, info, warn};
use uuid::Uuid;
use terraphim_types::{FindingCategory, FindingSeverity, ReviewAgentOutput, ReviewFinding};
use crate::config::CompoundReviewConfig;
use crate::error::OrchestratorError;
use crate::scope::WorktreeManager;
// Review-prompt templates, one per agent, compiled into the binary at build
// time from the crate's `prompts/` directory.
const PROMPT_SECURITY: &str = include_str!("../prompts/review-security.md");
const PROMPT_ARCHITECTURE: &str = include_str!("../prompts/review-architecture.md");
const PROMPT_PERFORMANCE: &str = include_str!("../prompts/review-performance.md");
const PROMPT_QUALITY: &str = include_str!("../prompts/review-quality.md");
const PROMPT_DOMAIN: &str = include_str!("../prompts/review-domain.md");
const PROMPT_DESIGN_QUALITY: &str = include_str!("../prompts/review-design-quality.md");
/// Configuration for one review agent in the swarm: which CLI runs it, the
/// prompt it receives, and the finding category its results map to.
#[derive(Debug, Clone)]
pub struct ReviewGroupDef {
    /// Unique agent identifier, e.g. "security-sentinel".
    pub agent_name: String,
    /// Category assigned to the findings this agent produces.
    pub category: FindingCategory,
    /// Tier label (values "Quick"/"Deep" in `default_groups`); its consumer
    /// is not visible in this file.
    pub llm_tier: String,
    /// CLI binary name or full path used to run the agent.
    pub cli_tool: String,
    /// Optional model override passed through to the CLI.
    pub model: Option<String>,
    /// Repo-relative path of the prompt file (informational; the actual text
    /// is compiled in via `prompt_content`).
    pub prompt_template: String,
    /// Prompt text embedded at compile time.
    pub prompt_content: &'static str,
    /// When true, the agent runs only if UI/design files changed.
    pub visual_only: bool,
    /// Persona name associated with this agent's prompt, if any.
    pub persona: Option<String>,
}
impl ReviewGroupDef {
    /// The compiled-in prompt text for this group.
    pub fn prompt(&self) -> &str {
        self.prompt_content
    }
}
/// Runtime configuration for one compound-review swarm run.
#[derive(Debug, Clone)]
pub struct SwarmConfig {
    /// Review groups to execute (visual-only groups may be filtered out at
    /// run time when no UI files changed).
    pub groups: Vec<ReviewGroupDef>,
    /// Per-agent execution timeout.
    pub timeout: Duration,
    /// Directory under which throwaway git worktrees are created.
    pub worktree_root: PathBuf,
    /// Path of the repository under review.
    pub repo_path: PathBuf,
    /// Branch the review is diffed against.
    pub base_branch: String,
    /// Concurrency cap. NOTE(review): not currently enforced by `run` —
    /// confirm whether throttling is intended.
    pub max_concurrent_agents: usize,
    /// When false the run is a dry run (no PRs are created).
    pub create_prs: bool,
}
impl SwarmConfig {
    /// Build a swarm config from the compound-review settings: start from the
    /// default review groups, then apply any CLI-tool and model overrides to
    /// every group. When the configured CLI binary is `opencode` and a
    /// provider is set, the model is composed as `provider/model`.
    pub fn from_compound_config(config: &CompoundReviewConfig) -> Self {
        let mut groups = default_groups();
        if let Some(ref cli_tool) = config.cli_tool {
            groups.iter_mut().for_each(|g| g.cli_tool = cli_tool.clone());
        }
        if let Some(ref model) = config.model {
            let composed = match config.provider {
                Some(ref provider) => {
                    // Dispatch on the binary name so a full path still counts
                    // as "opencode".
                    let tool = config.cli_tool.as_deref().unwrap_or("");
                    let binary = std::path::Path::new(tool)
                        .file_name()
                        .and_then(|n| n.to_str())
                        .unwrap_or(tool);
                    if binary == "opencode" {
                        format!("{}/{}", provider, model)
                    } else {
                        model.clone()
                    }
                }
                None => model.clone(),
            };
            groups.iter_mut().for_each(|g| g.model = Some(composed.clone()));
        }
        Self {
            groups,
            timeout: Duration::from_secs(config.max_duration_secs),
            worktree_root: config.worktree_root.clone(),
            repo_path: config.repo_path.clone(),
            base_branch: config.base_branch.clone(),
            max_concurrent_agents: config.max_concurrent_agents,
            create_prs: config.create_prs,
        }
    }

    /// Same mapping as `from_compound_config`, but with no review groups and
    /// a fixed 300-second timeout; the group list is expected to be filled in
    /// by the caller.
    pub fn from_compound_config_empty(config: &CompoundReviewConfig) -> Self {
        Self {
            groups: Vec::new(),
            timeout: Duration::from_secs(300),
            worktree_root: config.worktree_root.clone(),
            repo_path: config.repo_path.clone(),
            base_branch: config.base_branch.clone(),
            max_concurrent_agents: config.max_concurrent_agents,
            create_prs: config.create_prs,
        }
    }
}
/// Aggregated outcome of one compound review run.
#[derive(Debug, Clone)]
pub struct CompoundReviewResult {
    /// Correlates logs, the worktree name, and this result.
    pub correlation_id: Uuid,
    /// Deduplicated findings across all agents.
    pub findings: Vec<ReviewFinding>,
    /// Raw per-agent outputs, including placeholder entries for agents that
    /// failed to run.
    pub agent_outputs: Vec<ReviewAgentOutput>,
    /// True when every agent passed and none failed to run.
    pub pass: bool,
    /// Wall-clock duration of the whole run.
    pub duration: Duration,
    /// Number of agent tasks spawned.
    pub agents_run: usize,
    /// Number of agents that failed to produce output.
    pub agents_failed: usize,
}
impl CompoundReviewResult {
    /// Render the swarm result as a Markdown report: a verdict header, a
    /// findings table (when there are findings), and a per-agent summary.
    pub fn format_report(&self) -> String {
        let verdict = if self.pass { "✅ PASS" } else { "❌ NO-GO" };
        let duration_secs = self.duration.as_secs();
        let mut report = "## Compound Review\n\n".to_string();
        report.push_str(&format!(
            "**Verdict: {}** | Duration: {}s | Agents: {} ({} failed)\n\n",
            verdict, duration_secs, self.agents_run, self.agents_failed
        ));
        if !self.findings.is_empty() {
            report.push_str(&format!("### Findings ({})\n\n", self.findings.len()));
            report.push_str("| Severity | File | Finding | Conf |\n");
            report.push_str("|----------|------|---------|------|\n");
            for f in &self.findings {
                let sev = format!("{:?}", f.severity);
                let file_loc = if !f.file.is_empty() {
                    if f.line > 0 {
                        format!("{}:{}", f.file, f.line)
                    } else {
                        f.file.clone()
                    }
                } else {
                    "-".to_string()
                };
                // Truncate by characters, not bytes: the previous byte slice
                // `&f.finding[..117]` panics when index 117 falls inside a
                // multi-byte UTF-8 character in agent-produced text.
                let finding_text = if f.finding.chars().count() > 120 {
                    let head: String = f.finding.chars().take(117).collect();
                    format!("{}...", head)
                } else {
                    f.finding.clone()
                };
                // Keep the Markdown table well-formed even when the finding
                // text contains newlines or pipe characters.
                let finding_text = finding_text.replace('\n', " ").replace('|', "\\|");
                report.push_str(&format!(
                    "| {} | {} | {} | {:.0}% |\n",
                    sev,
                    file_loc,
                    finding_text,
                    f.confidence * 100.0
                ));
            }
            report.push('\n');
        } else {
            report.push_str("**No findings.**\n\n");
        }
        report.push_str("### Per-Agent Summary\n\n");
        for output in &self.agent_outputs {
            let status = if output.pass { "✅" } else { "❌" };
            report.push_str(&format!(
                "- {} {}: {} finding(s) — {}\n",
                status,
                output.agent,
                output.findings.len(),
                output.summary
            ));
        }
        report
    }

    /// Findings a human should act on: only Critical and High severity.
    pub fn actionable_findings(&self) -> Vec<&ReviewFinding> {
        self.findings
            .iter()
            .filter(|f| {
                matches!(
                    f.severity,
                    FindingSeverity::Critical | FindingSeverity::High
                )
            })
            .collect()
    }
}
/// Runs a compound review: a swarm of CLI review agents executed against an
/// isolated git worktree, with their outputs merged into a single verdict.
#[derive(Debug)]
pub struct CompoundReviewWorkflow {
    /// Swarm settings (groups, timeout, repo and worktree paths).
    config: SwarmConfig,
    /// Creates and removes the throwaway worktrees used for each run.
    worktree_manager: WorktreeManager,
}
impl CompoundReviewWorkflow {
    /// Build a workflow from an already-assembled swarm config; worktrees are
    /// created under `config.worktree_root`, based on `config.repo_path`.
    pub fn new(config: SwarmConfig) -> Self {
        let worktree_manager = WorktreeManager::with_base(&config.repo_path, &config.worktree_root);
        Self {
            config,
            worktree_manager,
        }
    }

    /// Convenience constructor from the top-level compound-review config.
    pub fn from_compound_config(config: CompoundReviewConfig) -> Self {
        let swarm_config = SwarmConfig::from_compound_config(&config);
        Self::new(swarm_config)
    }

    /// Execute the review swarm for `git_ref`, diffed against `base_ref`.
    ///
    /// Pipeline: list changed files, drop visual-only groups when no UI files
    /// changed, create an isolated worktree, spawn one task per active group,
    /// collect results over a channel (with a grace period past the per-agent
    /// timeout), remove the worktree, deduplicate findings, and compute the
    /// overall pass/fail verdict.
    ///
    /// NOTE(review): `config.max_concurrent_agents` is not consulted here —
    /// all active groups are spawned at once; confirm whether throttling was
    /// intended.
    pub async fn run(
        &self,
        git_ref: &str,
        base_ref: &str,
    ) -> Result<CompoundReviewResult, OrchestratorError> {
        let start = Instant::now();
        let correlation_id = Uuid::new_v4();
        info!(
            correlation_id = %correlation_id,
            git_ref = %git_ref,
            base_ref = %base_ref,
            "starting compound review swarm"
        );
        let changed_files = self.get_changed_files(git_ref, base_ref).await?;
        debug!(count = changed_files.len(), "found changed files");
        // Visual-only groups (design review) run only when UI files changed.
        let has_visual = has_visual_changes(&changed_files);
        let active_groups: Vec<&ReviewGroupDef> = self
            .config
            .groups
            .iter()
            .filter(|g| !g.visual_only || has_visual)
            .collect();
        info!(
            total_groups = self.config.groups.len(),
            active_groups = active_groups.len(),
            has_visual_changes = has_visual,
            "filtered review groups"
        );
        // One throwaway worktree per run, named after the correlation id.
        let worktree_name = format!("review-{}", correlation_id);
        let worktree_path = self
            .worktree_manager
            .create_worktree(&worktree_name, git_ref)
            .await
            .map_err(|e| {
                OrchestratorError::CompoundReviewFailed(format!("failed to create worktree: {}", e))
            })?;
        // Channel sized so every agent can report without blocking.
        let (tx, mut rx) = mpsc::channel::<AgentResult>(active_groups.len().max(1));
        let mut spawned_count = 0;
        for group in active_groups {
            let tx = tx.clone();
            let group = group.clone();
            let worktree_path = worktree_path.clone();
            let changed_files = changed_files.clone();
            let timeout = self.config.timeout;
            let cli_tool = group.cli_tool.clone();
            tokio::spawn(async move {
                let result = run_single_agent(
                    &group,
                    &worktree_path,
                    &changed_files,
                    correlation_id,
                    timeout,
                    &cli_tool,
                )
                .await;
                // The receiver may already be gone if collection timed out;
                // a failed send is deliberately ignored.
                let _ = tx.send(result).await;
            });
            spawned_count += 1;
        }
        // Drop the original sender so `rx.recv()` yields None once every
        // spawned task has reported (or been dropped).
        drop(tx);
        let mut agent_outputs = Vec::new();
        let mut failed_count = 0;
        // Allow a 10s grace period beyond the per-agent timeout before
        // settling for partial results.
        let collect_deadline =
            tokio::time::Instant::now() + self.config.timeout + Duration::from_secs(10);
        loop {
            match tokio::time::timeout_at(collect_deadline, rx.recv()).await {
                Ok(Some(result)) => match result {
                    AgentResult::Success(output) => {
                        info!(agent = %output.agent, findings = output.findings.len(), "agent completed");
                        agent_outputs.push(output);
                    }
                    AgentResult::Failed { agent_name, reason } => {
                        warn!(agent = %agent_name, error = %reason, "agent failed");
                        failed_count += 1;
                        // Record a placeholder output so failures still show
                        // in the per-agent summary.
                        agent_outputs.push(ReviewAgentOutput {
                            agent: agent_name,
                            findings: vec![],
                            summary: format!("Agent failed: {}", reason),
                            pass: false,
                        });
                    }
                },
                Ok(None) => break,
                Err(_) => {
                    warn!("collection deadline exceeded, using partial results");
                    break;
                }
            }
        }
        // Cleanup is best-effort; a leaked worktree is only logged.
        if let Err(e) = self.worktree_manager.remove_worktree(&worktree_name).await {
            warn!(error = %e, "failed to cleanup worktree");
        }
        let all_findings: Vec<ReviewFinding> = agent_outputs
            .iter()
            .flat_map(|o| o.findings.clone())
            .collect();
        let deduplicated = terraphim_types::deduplicate_findings(all_findings);
        // Overall verdict: every agent must pass and none may have failed.
        let pass = agent_outputs.iter().all(|o| o.pass) && failed_count == 0;
        let duration = start.elapsed();
        info!(
            correlation_id = %correlation_id,
            agents_run = spawned_count,
            agents_failed = failed_count,
            total_findings = deduplicated.len(),
            pass = %pass,
            duration = ?duration,
            "compound review completed"
        );
        Ok(CompoundReviewResult {
            correlation_id,
            findings: deduplicated,
            agent_outputs,
            pass,
            duration,
            agents_run: spawned_count,
            agents_failed: failed_count,
        })
    }

    /// Associated wrapper delegating to the module-level `default_groups`.
    pub fn default_groups() -> Vec<ReviewGroupDef> {
        default_groups()
    }

    /// Associated wrapper delegating to the module-level `has_visual_changes`.
    pub fn has_visual_changes(changed_files: &[String]) -> bool {
        has_visual_changes(changed_files)
    }

    /// Associated wrapper delegating to the module-level `extract_review_output`.
    pub fn extract_review_output(
        stdout: &str,
        agent_name: &str,
        category: FindingCategory,
    ) -> ReviewAgentOutput {
        extract_review_output(stdout, agent_name, category)
    }

    /// List files changed between `base_ref` and `git_ref` via
    /// `git -C <repo> diff --name-only`, one path per returned string.
    async fn get_changed_files(
        &self,
        git_ref: &str,
        base_ref: &str,
    ) -> Result<Vec<String>, OrchestratorError> {
        let output = tokio::process::Command::new("git")
            .args([
                "-C",
                self.config.repo_path.to_str().unwrap_or("."),
                "diff",
                "--name-only",
                base_ref,
                git_ref,
            ])
            // Presumably dropped so an index override inherited from a parent
            // git process can't skew the diff — TODO confirm.
            .env_remove("GIT_INDEX_FILE")
            .output()
            .await
            .map_err(|e| {
                OrchestratorError::CompoundReviewFailed(format!("git diff failed: {}", e))
            })?;
        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            return Err(OrchestratorError::CompoundReviewFailed(format!(
                "git diff returned non-zero: {}",
                stderr
            )));
        }
        let stdout = String::from_utf8_lossy(&output.stdout);
        let files: Vec<String> = stdout
            .lines()
            .filter(|line| !line.trim().is_empty())
            .map(|line| line.to_string())
            .collect();
        Ok(files)
    }

    /// True when PR creation is disabled (review runs but publishes nothing).
    pub fn is_dry_run(&self) -> bool {
        !self.config.create_prs
    }
}
/// Message sent from a spawned agent task back to the result collector.
enum AgentResult {
    /// The agent process completed and its stdout was parsed.
    Success(ReviewAgentOutput),
    /// The agent could not run to completion (spawn failure or timeout).
    Failed { agent_name: String, reason: String },
}
/// Spawn one review-agent CLI process inside `worktree_path` and parse its
/// stdout into a structured `ReviewAgentOutput`.
///
/// The argument shape is keyed off the basename of `cli_tool`:
/// `opencode` → `run --format json [-m model] <prompt>`,
/// `claude`/`claude-code` → `-p <prompt> [--model model]`,
/// `codex` → `exec --full-auto [-m model] <prompt>`,
/// anything else → the prompt as the sole leading argument.
async fn run_single_agent(
    group: &ReviewGroupDef,
    worktree_path: &Path,
    changed_files: &[String],
    _correlation_id: Uuid,
    timeout: Duration,
    cli_tool: &str,
) -> AgentResult {
    let agent_name = &group.agent_name;
    let prompt = group.prompt_content;
    let mut cmd = tokio::process::Command::new(cli_tool);
    // `cli_tool` may be a full path; dispatch on the binary name alone.
    let cli_name = std::path::Path::new(cli_tool)
        .file_name()
        .and_then(|n| n.to_str())
        .unwrap_or(cli_tool);
    match cli_name {
        "opencode" => {
            cmd.arg("run").arg("--format").arg("json");
            if let Some(ref model) = group.model {
                cmd.arg("-m").arg(model);
            }
            cmd.arg(prompt);
        }
        "claude" | "claude-code" => {
            cmd.arg("-p").arg(prompt);
            if let Some(ref model) = group.model {
                cmd.arg("--model").arg(model);
            }
        }
        "codex" => {
            cmd.arg("exec").arg("--full-auto");
            if let Some(ref model) = group.model {
                cmd.arg("-m").arg(model);
            }
            cmd.arg(prompt);
        }
        _ => {
            cmd.arg(prompt);
        }
    }
    cmd.current_dir(worktree_path);
    // Changed files are appended as trailing positional arguments after the
    // prompt. NOTE(review): whether each CLI honors trailing file args is
    // tool-specific — confirm for opencode/claude/codex.
    for file in changed_files {
        cmd.arg(file);
    }
    debug!(
        agent = %agent_name,
        command = ?cmd,
        "spawning review agent"
    );
    let result = tokio::time::timeout(timeout, cmd.output()).await;
    match result {
        Ok(Ok(output)) => {
            // NOTE(review): the exit status is not checked — a non-zero exit
            // with parseable stdout still counts as Success; unparseable
            // output is downgraded to pass=false by the extractor. Confirm
            // this is intended.
            let stdout = String::from_utf8_lossy(&output.stdout);
            let review_output = extract_review_output(&stdout, agent_name, group.category);
            AgentResult::Success(review_output)
        }
        Ok(Err(e)) => AgentResult::Failed {
            agent_name: agent_name.clone(),
            reason: format!("command execution failed: {}", e),
        },
        Err(_) => AgentResult::Failed {
            agent_name: agent_name.clone(),
            reason: "timeout exceeded".to_string(),
        },
    }
}
/// Parse a review agent's stdout into a structured `ReviewAgentOutput`.
///
/// Parsing strategy, in order:
/// 1. any single line (optionally inside a one-line ```json fence) that is a
///    valid `ReviewAgentOutput` JSON document;
/// 2. the entire (protocol-unwrapped) output as one JSON document;
/// 3. a keyword heuristic that turns lines mentioning "critical",
///    "vulnerability", "cve-" or "rustsec-" into synthetic findings;
/// 4. otherwise an empty, non-passing output describing the failure.
fn extract_review_output(
    stdout: &str,
    agent_name: &str,
    category: FindingCategory,
) -> ReviewAgentOutput {
    // Strip opencode's JSON-lines streaming wrapper first, if present.
    let unwrapped = unwrap_opencode_protocol(stdout);
    for line in unwrapped.lines() {
        let trimmed = line.trim();
        if trimmed.is_empty() {
            continue;
        }
        if let Ok(output) = serde_json::from_str::<ReviewAgentOutput>(trimmed) {
            return output;
        }
        // Single-line fenced block: ```json { ... } ```  (the previous code
        // stripped the prefix twice via a redundant `or_else`; trimming any
        // trailing fence covers both cases).
        if let Some(rest) = trimmed.strip_prefix("```json") {
            let clean = rest.trim_end_matches("```").trim();
            if let Ok(output) = serde_json::from_str::<ReviewAgentOutput>(clean) {
                return output;
            }
        }
    }
    // The whole output may be a single pretty-printed JSON document.
    if let Ok(output) = serde_json::from_str::<ReviewAgentOutput>(&unwrapped) {
        return output;
    }
    // Heuristic fallback: scan for severity keywords in unstructured text.
    // (A previous `_lower` binding lower-cased the whole output and was never
    // used; it has been removed.)
    let mut findings = vec![];
    for line in unwrapped.lines() {
        let line_lower = line.to_lowercase();
        if line_lower.contains("critical")
            || line_lower.contains("vulnerability")
            || line_lower.contains("cve-")
            || line_lower.contains("rustsec-")
        {
            let severity = if line_lower.contains("critical") {
                FindingSeverity::Critical
            } else if line_lower.contains("high") {
                FindingSeverity::High
            } else {
                FindingSeverity::Medium
            };
            findings.push(ReviewFinding {
                file: String::new(),
                line: 0,
                severity,
                category,
                finding: line.trim().to_string(),
                confidence: 0.7,
                suggestion: None,
            });
        }
    }
    if !findings.is_empty() {
        let count = findings.len();
        return ReviewAgentOutput {
            agent: agent_name.to_string(),
            findings,
            summary: format!("Extracted {} findings from unstructured output", count),
            pass: false,
        };
    }
    ReviewAgentOutput {
        agent: agent_name.to_string(),
        findings: vec![],
        summary: format!(
            "No structured output found in agent response. Output length: {} chars",
            unwrapped.len()
        ),
        pass: false,
    }
}
/// Collapse opencode's JSON-lines streaming protocol into plain text.
///
/// Each non-empty line that parses as a JSON object is unwrapped:
/// `part.text` or a top-level `text` field contributes its text verbatim,
/// while any other typed protocol event (tool calls, etc.) is reduced to a
/// short bracketed summary so tool payloads don't pollute the review text.
/// If no protocol line is seen, the input is returned unchanged.
fn unwrap_opencode_protocol(stdout: &str) -> String {
    use serde_json::Value;
    let mut unwrapped = String::new();
    let mut saw_protocol = false;
    for raw_line in stdout.lines() {
        let line = raw_line.trim();
        if line.is_empty() {
            continue;
        }
        if let Ok(val) = serde_json::from_str::<Value>(line) {
            if val.is_object() {
                // Streaming text nested under "part".
                if let Some(text) = val
                    .get("part")
                    .and_then(|p| p.get("text"))
                    .and_then(Value::as_str)
                {
                    saw_protocol = true;
                    unwrapped.push_str(text);
                    unwrapped.push('\n');
                    continue;
                }
                // Top-level "text" variant of the same protocol.
                if let Some(text) = val.get("text").and_then(Value::as_str) {
                    saw_protocol = true;
                    unwrapped.push_str(text);
                    unwrapped.push('\n');
                    continue;
                }
                // Any other typed event: summarize rather than inline it.
                if let Some(msg_type) = val.get("type").and_then(Value::as_str) {
                    saw_protocol = true;
                    let part = val.get("part");
                    let tool = part
                        .and_then(|p| p.get("tool"))
                        .and_then(Value::as_str)
                        .unwrap_or("unknown");
                    let state = part.and_then(|p| p.get("state"));
                    let status = state
                        .and_then(|s| s.get("status"))
                        .and_then(Value::as_str)
                        .unwrap_or("");
                    let target = state
                        .and_then(|s| s.get("input"))
                        .and_then(|i| {
                            i.get("filePath")
                                .or_else(|| i.get("path"))
                                .or_else(|| i.get("command"))
                        })
                        .and_then(Value::as_str)
                        .unwrap_or("");
                    if target.is_empty() {
                        unwrapped.push_str(&format!("[{}: {}]\n", msg_type, tool));
                    } else {
                        unwrapped.push_str(&format!(
                            "[{}: {} {} {}]\n",
                            msg_type, tool, target, status
                        ));
                    }
                    continue;
                }
            }
        }
        // Plain (or non-object JSON) line: keep it as-is.
        unwrapped.push_str(line);
        unwrapped.push('\n');
    }
    if saw_protocol {
        unwrapped
    } else {
        stdout.to_string()
    }
}
/// True when any changed file matches one of the visual/UI glob patterns,
/// which enables the visual-only review groups.
fn has_visual_changes(changed_files: &[String]) -> bool {
    let patterns = get_visual_patterns();
    changed_files
        .iter()
        .any(|file| patterns.iter().any(|pattern| glob_matches(file, pattern)))
}
/// Glob patterns identifying UI / design-system files; a change matching any
/// of these turns on the visual-only review groups.
fn get_visual_patterns() -> Vec<&'static str> {
    const VISUAL_PATTERNS: [&'static str; 10] = [
        "*.css",
        "*.scss",
        "tokens.*",
        "DESIGN.md",
        "*.svelte",
        "*.tsx",
        "*.vue",
        "src/components/*",
        "src/ui/*",
        "design-system/*",
    ];
    VISUAL_PATTERNS.to_vec()
}
/// Minimal glob matcher supporting exactly the shapes used by
/// `get_visual_patterns`: exact names, `*.ext`, `stem.*`, `dir/*`, and a
/// trailing-slash directory prefix.
fn glob_matches(file: &str, pattern: &str) -> bool {
    // Exact match covers patterns like "DESIGN.md".
    if file == pattern {
        return true;
    }
    // "*.ext" — any file whose name ends with ".ext".
    if let Some(suffix) = pattern.strip_prefix('*') {
        if suffix.starts_with('.') && file.ends_with(suffix) {
            return true;
        }
    }
    // "stem.*" and "dir/*" — both reduce to a prefix check once the
    // trailing '*' is dropped.
    if let Some(prefix) = pattern.strip_suffix('*') {
        if (prefix.ends_with('.') || prefix.ends_with('/')) && file.starts_with(prefix) {
            return true;
        }
    }
    // A bare directory prefix written with a trailing slash.
    if pattern.ends_with('/') && file.starts_with(pattern) {
        return true;
    }
    false
}
/// The six built-in review groups, one per finding category. Prompt content
/// is compiled in; `cli_tool` and `model` may be overridden via
/// `SwarmConfig::from_compound_config`.
fn default_groups() -> Vec<ReviewGroupDef> {
    vec![
        // Security review ("Vigil").
        ReviewGroupDef {
            agent_name: "security-sentinel".to_string(),
            category: FindingCategory::Security,
            llm_tier: "Quick".to_string(),
            cli_tool: "opencode".to_string(),
            model: None,
            prompt_template: "crates/terraphim_orchestrator/prompts/review-security.md".to_string(),
            prompt_content: PROMPT_SECURITY,
            visual_only: false,
            persona: Some("Vigil".to_string()),
        },
        // Architecture review ("Carthos").
        ReviewGroupDef {
            agent_name: "architecture-strategist".to_string(),
            category: FindingCategory::Architecture,
            llm_tier: "Deep".to_string(),
            cli_tool: "claude".to_string(),
            model: None,
            prompt_template: "crates/terraphim_orchestrator/prompts/review-architecture.md"
                .to_string(),
            prompt_content: PROMPT_ARCHITECTURE,
            visual_only: false,
            persona: Some("Carthos".to_string()),
        },
        // Performance review ("Ferrox").
        ReviewGroupDef {
            agent_name: "performance-oracle".to_string(),
            category: FindingCategory::Performance,
            llm_tier: "Deep".to_string(),
            cli_tool: "claude".to_string(),
            model: None,
            prompt_template: "crates/terraphim_orchestrator/prompts/review-performance.md"
                .to_string(),
            prompt_content: PROMPT_PERFORMANCE,
            visual_only: false,
            persona: Some("Ferrox".to_string()),
        },
        // Rust code-quality review ("Ferrox").
        ReviewGroupDef {
            agent_name: "rust-reviewer".to_string(),
            category: FindingCategory::Quality,
            llm_tier: "Deep".to_string(),
            cli_tool: "claude".to_string(),
            model: None,
            prompt_template: "crates/terraphim_orchestrator/prompts/review-quality.md".to_string(),
            prompt_content: PROMPT_QUALITY,
            visual_only: false,
            persona: Some("Ferrox".to_string()),
        },
        // Domain-model review ("Carthos").
        ReviewGroupDef {
            agent_name: "domain-model-reviewer".to_string(),
            category: FindingCategory::Domain,
            llm_tier: "Quick".to_string(),
            cli_tool: "opencode".to_string(),
            model: None,
            prompt_template: "crates/terraphim_orchestrator/prompts/review-domain.md".to_string(),
            prompt_content: PROMPT_DOMAIN,
            visual_only: false,
            persona: Some("Carthos".to_string()),
        },
        // Design-fidelity review ("Lux") — the only visual-only group.
        ReviewGroupDef {
            agent_name: "design-fidelity-reviewer".to_string(),
            category: FindingCategory::DesignQuality,
            llm_tier: "Deep".to_string(),
            cli_tool: "claude".to_string(),
            model: None,
            prompt_template: "crates/terraphim_orchestrator/prompts/review-design-quality.md"
                .to_string(),
            prompt_content: PROMPT_DESIGN_QUALITY,
            visual_only: true,
            persona: Some("Lux".to_string()),
        },
    ]
}
#[cfg(test)]
mod tests {
    //! Unit tests covering glob matching, visual-change detection, agent
    //! output parsing, default-group wiring, config mapping, and the
    //! persona/prompt contents compiled into the binary.
    use super::*;
    use terraphim_types::FindingSeverity;

    // --- visual-change detection ---

    #[test]
    fn test_visual_file_detection_css() {
        let files = vec!["styles.css".to_string()];
        assert!(has_visual_changes(&files));
    }

    #[test]
    fn test_visual_file_detection_tsx() {
        let files = vec!["src/components/Button.tsx".to_string()];
        assert!(has_visual_changes(&files));
    }

    #[test]
    fn test_visual_file_detection_design_md() {
        let files = vec!["DESIGN.md".to_string()];
        assert!(has_visual_changes(&files));
    }

    #[test]
    fn test_visual_file_detection_rust_only() {
        let files = vec!["src/main.rs".to_string(), "src/lib.rs".to_string()];
        assert!(!has_visual_changes(&files));
    }

    #[test]
    fn test_visual_file_detection_component_dir() {
        let files = vec!["src/components/mod.rs".to_string()];
        assert!(has_visual_changes(&files));
    }

    #[test]
    fn test_visual_file_detection_tokens() {
        let files = vec!["tokens.json".to_string()];
        assert!(has_visual_changes(&files));
    }

    // --- agent output parsing ---

    #[test]
    fn test_extract_review_output_valid_json() {
        let json = r#"{"agent":"test-agent","findings":[],"summary":"All good","pass":true}"#;
        let output = extract_review_output(json, "test-agent", FindingCategory::Quality);
        assert_eq!(output.agent, "test-agent");
        assert!(output.pass);
        assert_eq!(output.findings.len(), 0);
    }

    #[test]
    fn test_extract_review_output_mixed_output() {
        let mixed = r#"Some log output here
{"agent":"test-agent","findings":[{"file":"src/lib.rs","line":42,"severity":"high","category":"security","finding":"Test issue","confidence":0.9}],"summary":"Found 1 issue","pass":false}
More logs..."#;
        let output = extract_review_output(mixed, "test-agent", FindingCategory::Security);
        assert_eq!(output.agent, "test-agent");
        assert!(!output.pass);
        assert_eq!(output.findings.len(), 1);
        assert_eq!(output.findings[0].severity, FindingSeverity::High);
    }

    #[test]
    fn test_extract_review_output_no_json() {
        let no_json = "Just some plain text output without JSON";
        let output = extract_review_output(no_json, "test-agent", FindingCategory::Quality);
        assert_eq!(output.agent, "test-agent");
        assert!(!output.pass);
        assert_eq!(output.findings.len(), 0);
    }

    #[test]
    fn test_extract_review_output_markdown_code_block() {
        let markdown = r#"Here's my review:
```json
{"agent":"test-agent","findings":[],"summary":"No issues","pass":true}
```
Done!"#;
        let output = extract_review_output(markdown, "test-agent", FindingCategory::Quality);
        assert_eq!(output.agent, "test-agent");
        assert!(output.pass);
    }

    // --- default-group wiring ---

    #[test]
    fn test_default_groups_count() {
        let groups = default_groups();
        assert_eq!(groups.len(), 6);
    }

    #[test]
    fn test_default_groups_one_visual_only() {
        let groups = default_groups();
        let visual_only_count = groups.iter().filter(|g| g.visual_only).count();
        assert_eq!(visual_only_count, 1);
        let visual_group = groups.iter().find(|g| g.visual_only).unwrap();
        assert_eq!(visual_group.agent_name, "design-fidelity-reviewer");
        assert_eq!(visual_group.category, FindingCategory::DesignQuality);
    }

    #[test]
    fn test_default_groups_categories() {
        let groups = default_groups();
        let categories: Vec<_> = groups.iter().map(|g| g.category).collect();
        assert!(categories.contains(&FindingCategory::Security));
        assert!(categories.contains(&FindingCategory::Architecture));
        assert!(categories.contains(&FindingCategory::Performance));
        assert!(categories.contains(&FindingCategory::Quality));
        assert!(categories.contains(&FindingCategory::Domain));
        assert!(categories.contains(&FindingCategory::DesignQuality));
    }

    // --- glob matching ---

    #[test]
    fn test_glob_matches_extension() {
        assert!(glob_matches("styles.css", "*.css"));
        assert!(glob_matches("app.scss", "*.scss"));
        assert!(glob_matches("Component.tsx", "*.tsx"));
        assert!(!glob_matches("main.rs", "*.css"));
    }

    #[test]
    fn test_glob_matches_directory() {
        assert!(glob_matches("src/components/Button.rs", "src/components/*"));
        assert!(glob_matches("src/ui/mod.rs", "src/ui/*"));
        assert!(!glob_matches("src/main.rs", "src/components/*"));
    }

    #[test]
    fn test_glob_matches_exact() {
        assert!(glob_matches("DESIGN.md", "DESIGN.md"));
        assert!(!glob_matches("README.md", "DESIGN.md"));
    }

    #[test]
    fn test_glob_matches_design_system() {
        assert!(glob_matches("design-system/tokens.css", "design-system/*"));
        assert!(glob_matches(
            "design-system/components/button.css",
            "design-system/*"
        ));
    }

    // --- workflow construction & config mapping ---

    #[tokio::test]
    async fn test_compound_review_dry_run() {
        let swarm_config = SwarmConfig {
            groups: default_groups(),
            timeout: Duration::from_secs(60),
            worktree_root: std::env::temp_dir().join("test-compound-review-worktrees"),
            repo_path: PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../.."),
            base_branch: "main".to_string(),
            max_concurrent_agents: 3,
            create_prs: false,
        };
        let workflow = CompoundReviewWorkflow::new(swarm_config);
        assert!(workflow.is_dry_run());
    }

    #[tokio::test]
    async fn test_get_changed_files_real_repo() {
        let swarm_config = SwarmConfig {
            groups: default_groups(),
            timeout: Duration::from_secs(60),
            worktree_root: std::env::temp_dir().join("test-compound-review-worktrees"),
            repo_path: PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../.."),
            base_branch: "main".to_string(),
            max_concurrent_agents: 3,
            create_prs: false,
        };
        let workflow = CompoundReviewWorkflow::new(swarm_config);
        let result = workflow.get_changed_files("HEAD", "HEAD~1").await;
        match result {
            Ok(files) => {
                for file in &files {
                    assert!(!file.is_empty());
                }
            }
            Err(_) => {
                // Shallow clones (e.g. in CI) may lack HEAD~1; a failed diff
                // is acceptable here.
            }
        }
    }

    #[test]
    fn test_swarm_config_from_compound_config() {
        let compound_config = CompoundReviewConfig {
            schedule: "0 2 * * *".to_string(),
            max_duration_secs: 1800,
            repo_path: PathBuf::from("/tmp/repo"),
            create_prs: false,
            worktree_root: PathBuf::from("/tmp/worktrees"),
            base_branch: "main".to_string(),
            max_concurrent_agents: 3,
            cli_tool: None,
            provider: None,
            model: None,
            ..Default::default()
        };
        let swarm_config = SwarmConfig::from_compound_config(&compound_config);
        assert_eq!(swarm_config.repo_path, PathBuf::from("/tmp/repo"));
        assert_eq!(swarm_config.worktree_root, PathBuf::from("/tmp/worktrees"));
        assert_eq!(swarm_config.base_branch, "main");
        assert_eq!(swarm_config.max_concurrent_agents, 3);
        assert!(!swarm_config.create_prs);
        assert_eq!(swarm_config.groups.len(), 6);
    }

    #[test]
    fn test_compound_review_result_structure() {
        let result = CompoundReviewResult {
            correlation_id: Uuid::new_v4(),
            findings: vec![],
            agent_outputs: vec![],
            pass: true,
            duration: Duration::from_secs(10),
            agents_run: 6,
            agents_failed: 0,
        };
        assert!(result.pass);
        assert_eq!(result.agents_run, 6);
        assert_eq!(result.agents_failed, 0);
    }

    // --- compiled-in prompt content / persona checks ---

    #[test]
    fn test_review_security_contains_vigil() {
        let prompt = include_str!("../prompts/review-security.md");
        assert!(
            prompt.contains("Vigil"),
            "review-security.md should contain 'Vigil'"
        );
        assert!(
            prompt.contains("Security Engineer"),
            "review-security.md should mention Security Engineer"
        );
    }

    #[test]
    fn test_review_architecture_contains_carthos() {
        let prompt = include_str!("../prompts/review-architecture.md");
        assert!(
            prompt.contains("Carthos"),
            "review-architecture.md should contain 'Carthos'"
        );
        assert!(
            prompt.contains("Domain Architect"),
            "review-architecture.md should mention Domain Architect"
        );
    }

    #[test]
    fn test_review_quality_contains_ferrox() {
        let prompt = include_str!("../prompts/review-quality.md");
        assert!(
            prompt.contains("Ferrox"),
            "review-quality.md should contain 'Ferrox'"
        );
        assert!(
            prompt.contains("Rust Engineer"),
            "review-quality.md should mention Rust Engineer"
        );
    }

    #[test]
    fn test_review_performance_contains_ferrox() {
        let prompt = include_str!("../prompts/review-performance.md");
        assert!(
            prompt.contains("Ferrox"),
            "review-performance.md should contain 'Ferrox'"
        );
        assert!(
            prompt.contains("Rust Engineer"),
            "review-performance.md should mention Rust Engineer"
        );
    }

    #[test]
    fn test_review_domain_contains_carthos() {
        let prompt = include_str!("../prompts/review-domain.md");
        assert!(
            prompt.contains("Carthos"),
            "review-domain.md should contain 'Carthos'"
        );
        assert!(
            prompt.contains("Domain Architect"),
            "review-domain.md should mention Domain Architect"
        );
    }

    #[test]
    fn test_review_design_contains_lux() {
        let prompt = include_str!("../prompts/review-design-quality.md");
        assert!(
            prompt.contains("Lux"),
            "review-design-quality.md should contain 'Lux'"
        );
        assert!(
            prompt.contains("TypeScript Engineer"),
            "review-design-quality.md should mention TypeScript Engineer"
        );
    }

    #[test]
    fn test_default_groups_all_have_persona() {
        let groups = default_groups();
        for group in &groups {
            assert!(
                group.persona.is_some(),
                "Group '{}' should have a persona set",
                group.agent_name
            );
        }
        let vigil = groups
            .iter()
            .find(|g| g.agent_name == "security-sentinel")
            .unwrap();
        assert_eq!(vigil.persona.as_ref().unwrap(), "Vigil");
        let carthos_arch = groups
            .iter()
            .find(|g| g.agent_name == "architecture-strategist")
            .unwrap();
        assert_eq!(carthos_arch.persona.as_ref().unwrap(), "Carthos");
        let ferrox_perf = groups
            .iter()
            .find(|g| g.agent_name == "performance-oracle")
            .unwrap();
        assert_eq!(ferrox_perf.persona.as_ref().unwrap(), "Ferrox");
        let ferrox_qual = groups
            .iter()
            .find(|g| g.agent_name == "rust-reviewer")
            .unwrap();
        assert_eq!(ferrox_qual.persona.as_ref().unwrap(), "Ferrox");
        let carthos_domain = groups
            .iter()
            .find(|g| g.agent_name == "domain-model-reviewer")
            .unwrap();
        assert_eq!(carthos_domain.persona.as_ref().unwrap(), "Carthos");
        let lux = groups
            .iter()
            .find(|g| g.agent_name == "design-fidelity-reviewer")
            .unwrap();
        assert_eq!(lux.persona.as_ref().unwrap(), "Lux");
    }

    #[test]
    fn test_extract_review_output_with_persona_agent_name() {
        let json = r#"{"agent":"Vigil-security-sentinel","findings":[{"file":"src/lib.rs","line":42,"severity":"high","category":"security","finding":"Test issue","confidence":0.9}],"summary":"Found 1 security issue","pass":false}"#;
        let output =
            extract_review_output(json, "Vigil-security-sentinel", FindingCategory::Security);
        assert_eq!(output.agent, "Vigil-security-sentinel");
        assert!(!output.pass);
        assert_eq!(output.findings.len(), 1);
    }

    #[test]
    fn test_compound_config_cli_tool_override() {
        let config = CompoundReviewConfig {
            schedule: "0 2 * * *".to_string(),
            max_duration_secs: 1800,
            repo_path: PathBuf::from("/tmp"),
            create_prs: false,
            worktree_root: PathBuf::from("/tmp/worktrees"),
            base_branch: "main".to_string(),
            max_concurrent_agents: 3,
            cli_tool: Some("/home/alex/.bun/bin/opencode".to_string()),
            provider: Some("opencode-go".to_string()),
            model: Some("glm-5".to_string()),
            ..Default::default()
        };
        let swarm = SwarmConfig::from_compound_config(&config);
        for group in &swarm.groups {
            assert_eq!(group.cli_tool, "/home/alex/.bun/bin/opencode");
            assert_eq!(group.model, Some("opencode-go/glm-5".to_string()));
        }
    }

    #[test]
    fn test_compound_config_no_override() {
        let config = CompoundReviewConfig {
            schedule: "0 2 * * *".to_string(),
            max_duration_secs: 1800,
            repo_path: PathBuf::from("/tmp"),
            create_prs: false,
            worktree_root: PathBuf::from("/tmp/worktrees"),
            base_branch: "main".to_string(),
            max_concurrent_agents: 3,
            cli_tool: None,
            provider: None,
            model: None,
            ..Default::default()
        };
        let swarm = SwarmConfig::from_compound_config(&config);
        assert_eq!(swarm.groups[0].cli_tool, "opencode");
        assert!(swarm.groups[0].model.is_none());
    }

    // --- opencode protocol unwrapping ---

    #[test]
    fn test_unwrap_opencode_protocol_formats_tool_use() {
        let protocol_output = r#"{"type":"text","part":{"type":"text","text":"Starting review..."}}
{"type":"tool_use","timestamp":1775340045267,"sessionID":"ses_abc","part":{"id":"prt_123","tool":"read","state":{"status":"completed","input":{"filePath":"/tmp/test.rs"},"output":"fn critical_path() { }"}}}
{"type":"text","part":{"type":"text","text":"Review complete. No issues found."}}"#;
        let unwrapped = unwrap_opencode_protocol(protocol_output);
        assert!(
            !unwrapped.contains("critical_path"),
            "tool_use payload content should not leak through"
        );
        assert!(
            unwrapped.contains("[tool_use: read /tmp/test.rs completed]"),
            "tool_use should be formatted as summary, got: {}",
            unwrapped
        );
        assert!(unwrapped.contains("Starting review..."));
        assert!(unwrapped.contains("Review complete."));
    }

    #[test]
    fn test_extract_review_output_no_false_critical_from_tool_use() {
        let protocol_output = r#"{"type":"text","part":{"type":"text","text":"Reviewing code..."}}
{"type":"tool_use","part":{"tool":"read","state":{"output":"FindingSeverity::Critical is used here"}}}
{"type":"text","part":{"type":"text","text":"All looks good, no issues."}}"#;
        let output =
            extract_review_output(protocol_output, "test-agent", FindingCategory::Security);
        assert_eq!(
            output.findings.len(),
            0,
            "tool_use payloads must not generate synthetic findings"
        );
    }

    #[test]
    fn test_compound_config_timeout_uses_max_duration() {
        let config = CompoundReviewConfig {
            schedule: "0 2 * * *".to_string(),
            max_duration_secs: 900,
            repo_path: PathBuf::from("/tmp"),
            create_prs: false,
            worktree_root: PathBuf::from("/tmp/worktrees"),
            base_branch: "main".to_string(),
            max_concurrent_agents: 3,
            cli_tool: None,
            provider: None,
            model: None,
            ..Default::default()
        };
        let swarm = SwarmConfig::from_compound_config(&config);
        assert_eq!(swarm.timeout, Duration::from_secs(900));
    }
}