use std::path::PathBuf;
use anyhow::{Context, Result};
use bzzz_core::{FlowPattern, SwarmFileParser, Worker};
pub async fn execute(file: PathBuf) -> Result<()> {
println!("🔍 Inspecting: {}", file.display());
let swarm = SwarmFileParser::from_yaml_file(&file)
.with_context(|| format!("Failed to parse SwarmFile: {}", file.display()))?;
println!("\n📋 Basic Info");
println!(" ID: {}", swarm.id.as_str());
println!(" API Version: {:?}", swarm.api_version);
println!(" Kind: {:?}", swarm.kind);
println!("\n👥 Workers ({})", swarm.workers.len());
for worker in &swarm.workers {
print_worker_info(worker);
}
println!("\n🔄 Flow Pattern: {}", swarm.flow.type_name());
print_flow_details(&swarm.flow);
println!("\n📊 Data Flow");
analyze_data_flow(&swarm);
let warnings = detect_issues(&swarm);
if warnings.is_empty() {
println!("\n✅ No issues detected");
} else {
println!("\n⚠️ Warnings ({})", warnings.len());
for warning in warnings {
println!(" - {}", warning);
}
}
Ok(())
}
/// Print a one-line summary of `worker` (name, type tag, location) plus any
/// input/output mapping counts and runtime override.
///
/// A worker is classified by which locator it defines: `a2a` takes
/// precedence over `spec`; a worker with neither shows "Unknown" with an
/// empty location.
fn print_worker_info(worker: &Worker) {
    // Derive the type tag and display location from one match so the
    // precedence (a2a over spec) cannot drift between the two, and borrow
    // the fields instead of cloning whole Options — we allocate only the
    // final display String. The original cloned `a2a` and `spec` outright.
    let (worker_type, location) = match (&worker.a2a, &worker.spec) {
        (Some(a2a), _) => ("A2A", a2a.clone()),
        (None, Some(spec)) => ("Spec", spec.to_string_lossy().into_owned()),
        (None, None) => ("Unknown", String::new()),
    };

    println!(" • {} [{}]: {}", worker.name, worker_type, location);

    if !worker.input.is_empty() {
        println!(" Input mappings: {} keys", worker.input.len());
    }
    if !worker.output.is_empty() {
        println!(" Output mappings: {} keys", worker.output.len());
    }
    if let Some(runtime) = &worker.runtime {
        println!(" Runtime override: {:?}", runtime);
    }
}
/// Pretty-print the variant-specific details of a flow pattern.
///
/// Each arm mirrors the fields of its `FlowPattern` variant; the printed
/// strings are part of the CLI's user-facing output format, so the exact
/// text and print order are intentional.
fn print_flow_details(flow: &FlowPattern) {
match flow {
// Linear pipeline: steps execute one after another, in declared order.
FlowPattern::Sequence { steps } => {
println!(" Steps: {} (in order)", steps.len());
for (i, step) in steps.iter().enumerate() {
println!(" {}. {}", i + 1, step);
}
}
// Concurrent branches; `fail_fast` controls whether one failure aborts the rest.
FlowPattern::Parallel {
branches,
fail_fast,
} => {
println!(" Branches: {} (concurrent)", branches.len());
println!(" Fail fast: {}", fail_fast);
for branch in branches {
println!(" • {}", branch);
}
}
// If/else dispatch on `condition`; the else branch is optional.
FlowPattern::Conditional {
condition,
then,
else_,
} => {
println!(" Condition: {}", condition);
println!(" Then: {}", then);
if let Some(else_branch) = else_ {
println!(" Else: {}", else_branch);
}
}
// Iterate `do_` over the collection expression `over`.
FlowPattern::Loop {
over,
do_,
max_iterations,
} => {
println!(" Iterate over: {}", over);
println!(" Execute: {}", do_);
// A max_iterations of 0 is the sentinel for "no cap".
let max_str = if *max_iterations == 0 {
"unlimited".to_string()
} else {
max_iterations.to_string()
};
println!(" Max iterations: {}", max_str);
}
// Hand off to another swarm file and/or a worker chosen by expression;
// all three fields are optional, so only the ones present are shown.
FlowPattern::Delegate {
swarm,
worker_expr,
fallback,
..
} => {
if let Some(swarm_path) = swarm {
println!(" Delegates to: {}", swarm_path.display());
}
if let Some(expr) = worker_expr {
println!(" Worker expression: {}", expr);
}
if let Some(fb) = fallback {
println!(" Fallback: {}", fb);
}
}
// Supervised workers with a restart policy and optional recovery policy.
FlowPattern::Supervisor {
workers,
restart_policy,
recovery_policy,
} => {
println!(" Supervising: {} workers", workers.len());
println!(" Restart policy: {:?}", restart_policy);
if let Some(rp) = recovery_policy {
println!(" Recovery policy:");
println!(" Retry attempts: {}", rp.retry_attempts);
if let Some(expr) = &rp.replan_expr {
println!(" Replan expression: {}", expr);
}
if let Some(fb) = &rp.replan_fallback {
println!(" Replan fallback: {}", fb);
}
if let Some(swarm) = &rp.decompose_swarm {
println!(" Decompose swarm: {}", swarm.display());
}
}
}
// Racing workers: the first to finish wins.
FlowPattern::Compete { workers } => {
println!(" Competing: {} workers (first wins)", workers.len());
for w in workers {
println!(" • {}", w);
}
}
// Primary worker with an ordered escalation chain behind it.
FlowPattern::Escalation { primary, chain } => {
println!(" Primary: {}", primary);
println!(" Escalation chain: {} levels", chain.len());
for (i, level) in chain.iter().enumerate() {
println!(" {}. {}", i + 1, level);
}
}
// One main worker with background side workers running alongside.
FlowPattern::Alongside { main, side } => {
println!(" Main: {}", main);
println!(" Side workers: {} (background)", side.len());
for s in side {
println!(" • {}", s);
}
}
// DAG of tasks ordered by `depends_on`, with an optional synthesis step.
FlowPattern::Workflow { tasks, synthesis } => {
println!(" Tasks: {} (DAG execution)", tasks.len());
for task in tasks {
if task.depends_on.is_empty() {
println!(" • {} (no dependencies)", task.name);
} else {
println!(" • {} (depends on: {})", task.name, task.depends_on.join(", "));
}
}
if let Some(synth) = synthesis {
println!(" Synthesis: {}", synth);
}
}
}
}
/// Report the SwarmFile's data-flow wiring: per-worker input mappings,
/// per-worker output mappings, and the swarm's exposed output fields.
fn analyze_data_flow(swarm: &bzzz_core::SwarmFile) {
    // Workers declaring explicit input mappings.
    let inputs: Vec<_> = swarm
        .workers
        .iter()
        .filter(|w| !w.input.is_empty())
        .collect();
    if inputs.is_empty() {
        println!(" No explicit input mappings defined");
    } else {
        println!(" Input mappings in {} workers", inputs.len());
        for w in &inputs {
            for (key, val) in &w.input {
                println!(" {}.{} <- {}", w.name, key, format_value(val));
            }
        }
    }

    // Workers declaring explicit output mappings (section omitted when none).
    let outputs: Vec<_> = swarm
        .workers
        .iter()
        .filter(|w| !w.output.is_empty())
        .collect();
    if !outputs.is_empty() {
        println!(
            " Output mappings in {} workers",
            outputs.len()
        );
        for w in &outputs {
            for (key, val) in &w.output {
                println!(" {}.{} -> {}", w.name, key, format_value(val));
            }
        }
    }

    // Top-level exposed outputs, if any.
    if !swarm.expose.is_empty() {
        println!(" Exposed outputs: {} fields", swarm.expose.len());
        for mapping in &swarm.expose {
            println!(" {} <- {}", mapping.name, mapping.from);
        }
    }
}
/// Render a JSON value for display as compact JSON text.
///
/// Delegates entirely to `serde_json::Value`'s `Display` impl, which
/// serializes strings quoted *and* properly escaped. The previous manual
/// `format!("\"{}\"", …)` special case wrapped the raw string contents in
/// quotes without escaping embedded quotes or backslashes, producing
/// invalid renderings for values such as `he said "hi"`; for ordinary
/// strings the output is unchanged.
fn format_value(value: &serde_json::Value) -> String {
    value.to_string()
}
/// Lint the parsed SwarmFile and return human-readable warnings:
/// 1. flow references to workers that are not defined,
/// 2. defined workers that the flow never references, and
/// 3. workers missing both a `spec` and an `a2a` locator.
///
/// Warning order is stable: all undefined-reference warnings first (one per
/// reference occurrence, duplicates included), then unused workers, then
/// missing-locator workers, each in declaration order.
fn detect_issues(swarm: &bzzz_core::SwarmFile) -> Vec<String> {
    let mut warnings = Vec::new();

    let worker_names: std::collections::HashSet<_> =
        swarm.workers.iter().map(|w| &w.name).collect();

    // Names the flow pattern mentions; may contain duplicates, and each
    // occurrence of an undefined name deliberately warns once.
    let referenced_workers = get_referenced_workers(&swarm.flow);
    for worker in &referenced_workers {
        if !worker_names.contains(&worker) {
            warnings.push(format!("Undefined worker reference '{}' in flow", worker));
        }
    }

    // Set view for O(1) membership tests below — `Vec::contains` inside the
    // worker loop was an O(n·m) scan.
    let referenced_set: std::collections::HashSet<_> = referenced_workers.iter().collect();

    for worker in &swarm.workers {
        if !referenced_set.contains(&worker.name) {
            warnings.push(format!(
                "Unused worker '{}' (defined but not in flow)",
                worker.name
            ));
        }
    }

    for worker in &swarm.workers {
        if worker.spec.is_none() && worker.a2a.is_none() {
            warnings.push(format!(
                "Worker '{}' has no spec or a2a defined",
                worker.name
            ));
        }
    }

    warnings
}
fn get_referenced_workers(flow: &FlowPattern) -> Vec<String> {
match flow {
FlowPattern::Sequence { steps } => steps.clone(),
FlowPattern::Parallel { branches, .. } => branches.clone(),
FlowPattern::Conditional { then, else_, .. } => {
let mut refs = vec![then.clone()];
if let Some(else_branch) = else_ {
refs.push(else_branch.clone());
}
refs
}
FlowPattern::Loop { do_, .. } => vec![do_.clone()],
FlowPattern::Delegate { fallback, .. } => {
fallback.clone().map(|f| vec![f]).unwrap_or_default()
}
FlowPattern::Supervisor { workers, .. } => workers.clone(),
FlowPattern::Compete { workers } => workers.clone(),
FlowPattern::Escalation { primary, chain } => {
let mut refs = vec![primary.clone()];
refs.extend(chain.clone());
refs
}
FlowPattern::Alongside { main, side } => {
let mut refs = vec![main.clone()];
refs.extend(side.clone());
refs
}
FlowPattern::Workflow { tasks, synthesis } => {
let mut refs: Vec<String> = tasks.iter().map(|t| t.name.clone()).collect();
if let Some(synth) = synthesis {
refs.push(synth.clone());
}
refs
}
}
}