scud/commands/spawn/
mod.rs

1//! Spawn command - Launch parallel Claude Code agents in tmux sessions
2//!
3//! This module provides functionality to:
4//! - Spawn multiple tmux windows with Claude Code sessions
5//! - Generate task-specific prompts for each agent
6//! - Track spawn session state for TUI integration
7//! - Install Claude Code hooks for automatic task completion
8
9pub mod agent;
10pub mod hooks;
11pub mod monitor;
12pub mod terminal;
13pub mod tui;
14
15use anyhow::Result;
16use colored::Colorize;
17use std::path::PathBuf;
18use std::thread;
19use std::time::Duration;
20
21use crate::commands::helpers::{flatten_all_tasks, resolve_group_tag};
22use crate::models::task::{Task, TaskStatus};
23use crate::storage::Storage;
24
25use self::monitor::SpawnSession;
26use self::terminal::Harness;
27
/// Information about a task to spawn
struct TaskInfo<'a> {
    // Borrowed task record from the loaded phases; lives as long as the
    // phase map it was collected from.
    task: &'a Task,
    // Phase tag this task was collected from (owned clone of the map key).
    tag: String,
}
33
34/// Main entry point for the spawn command
35#[allow(clippy::too_many_arguments)]
36pub fn run(
37    project_root: Option<PathBuf>,
38    tag: Option<&str>,
39    limit: usize,
40    all_tags: bool,
41    dry_run: bool,
42    session: Option<String>,
43    attach: bool,
44    monitor: bool,
45    claim: bool,
46    harness_arg: &str,
47    model_arg: &str,
48) -> Result<()> {
49    let storage = Storage::new(project_root.clone());
50
51    if !storage.is_initialized() {
52        anyhow::bail!("SCUD not initialized. Run: scud init");
53    }
54
55    // Check tmux is available
56    terminal::check_tmux_available()?;
57
58    // Load all phases for cross-tag dependency checking
59    let all_phases = storage.load_tasks()?;
60    let all_tasks_flat = flatten_all_tasks(&all_phases);
61
62    // Determine phase tag
63    let phase_tag = if all_tags {
64        "all".to_string()
65    } else {
66        resolve_group_tag(&storage, tag, true)?
67    };
68
69    // Get ready tasks
70    let ready_tasks = get_ready_tasks(&all_phases, &all_tasks_flat, &phase_tag, limit, all_tags)?;
71
72    if ready_tasks.is_empty() {
73        println!("{}", "No ready tasks to spawn.".yellow());
74        println!("Check: scud list --status pending");
75        return Ok(());
76    }
77
78    // Parse harness
79    let harness = Harness::parse(harness_arg)?;
80
81    // Generate session name
82    let session_name = session.unwrap_or_else(|| format!("scud-{}", phase_tag));
83
84    // Display spawn plan
85    println!("{}", "SCUD Spawn".cyan().bold());
86    println!("{}", "═".repeat(50));
87    println!("{:<20} {}", "Terminal:".dimmed(), "tmux".green());
88    println!("{:<20} {}", "Harness:".dimmed(), harness.name().green());
89    println!("{:<20} {}", "Model:".dimmed(), model_arg.green());
90    println!("{:<20} {}", "Session:".dimmed(), session_name.cyan());
91    println!("{:<20} {}", "Tasks:".dimmed(), ready_tasks.len());
92    println!();
93
94    for (i, info) in ready_tasks.iter().enumerate() {
95        println!(
96            "  {} {} {} | {}",
97            format!("[{}]", i + 1).dimmed(),
98            info.tag.dimmed(),
99            info.task.id.cyan(),
100            info.task.title
101        );
102    }
103    println!();
104
105    if dry_run {
106        println!("{}", "Dry run - no terminals spawned.".yellow());
107        return Ok(());
108    }
109
110    // Get working directory
111    let working_dir = project_root
112        .clone()
113        .unwrap_or_else(|| std::env::current_dir().unwrap_or_default());
114
115    // Check and install Claude Code hooks for automatic task completion
116    if !hooks::hooks_installed(&working_dir) {
117        println!(
118            "{}",
119            "Installing Claude Code hooks for task completion...".dimmed()
120        );
121        if let Err(e) = hooks::install_hooks(&working_dir) {
122            println!(
123                "  {} Hook installation: {}",
124                "!".yellow(),
125                e.to_string().dimmed()
126            );
127        } else {
128            println!(
129                "  {} Hooks installed (tasks auto-complete on agent stop)",
130                "✓".green()
131            );
132        }
133    }
134
135    // Create spawn session metadata
136    let mut spawn_session = SpawnSession::new(
137        &session_name,
138        &phase_tag,
139        "tmux",
140        &working_dir.to_string_lossy(),
141    );
142
143    // Spawn agents
144    println!("{}", "Spawning agents...".green());
145
146    let mut success_count = 0;
147    let mut claimed_tasks: Vec<(String, String)> = Vec::new(); // (task_id, tag) pairs for claiming
148
149    for info in &ready_tasks {
150        // Resolve agent config (harness, model, prompt) from task's agent_type
151        let config = agent::resolve_agent_config(
152            info.task,
153            &info.tag,
154            harness,
155            Some(model_arg),
156            &working_dir,
157        );
158
159        // Warn if agent type was specified but definition not found
160        if info.task.agent_type.is_some() && !config.from_agent_def {
161            println!(
162                "  {} Agent '{}' not found, using CLI defaults",
163                "!".yellow(),
164                info.task.agent_type.as_deref().unwrap_or("unknown")
165            );
166        }
167
168        match terminal::spawn_terminal_with_harness_and_model(
169            &info.task.id,
170            &config.prompt,
171            &working_dir,
172            &session_name,
173            config.harness,
174            config.model.as_deref(),
175        ) {
176            Ok(window_index) => {
177                println!(
178                    "  {} Spawned: {} | {} [{}] {}:{}",
179                    "✓".green(),
180                    info.task.id.cyan(),
181                    info.task.title.dimmed(),
182                    config.display_info().dimmed(),
183                    session_name.dimmed(),
184                    window_index.dimmed(),
185                );
186                spawn_session.add_agent(&info.task.id, &info.task.title, &info.tag);
187                success_count += 1;
188
189                // Track for claiming
190                if claim {
191                    claimed_tasks.push((info.task.id.clone(), info.tag.clone()));
192                }
193            }
194            Err(e) => {
195                println!("  {} Failed: {} - {}", "✗".red(), info.task.id.red(), e);
196            }
197        }
198
199        // Small delay between spawns to avoid overwhelming the system
200        if success_count < ready_tasks.len() {
201            thread::sleep(Duration::from_millis(500));
202        }
203    }
204
205    // Claim tasks (mark as in-progress) if requested
206    if claim && !claimed_tasks.is_empty() {
207        println!();
208        println!("{}", "Claiming tasks...".dimmed());
209        for (task_id, task_tag) in &claimed_tasks {
210            // Reload phase and update task status
211            match storage.load_group(task_tag) {
212                Ok(mut phase) => {
213                    if let Some(task) = phase.get_task_mut(task_id) {
214                        task.set_status(TaskStatus::InProgress);
215                        if let Err(e) = storage.update_group(task_tag, &phase) {
216                            println!(
217                                "  {} Claim failed: {} - {}",
218                                "!".yellow(),
219                                task_id,
220                                e.to_string().dimmed()
221                            );
222                        } else {
223                            println!(
224                                "  {} Claimed: {} → {}",
225                                "✓".green(),
226                                task_id.cyan(),
227                                "in-progress".yellow()
228                            );
229                        }
230                    }
231                }
232                Err(e) => {
233                    println!(
234                        "  {} Claim failed: {} - {}",
235                        "!".yellow(),
236                        task_id,
237                        e.to_string().dimmed()
238                    );
239                }
240            }
241        }
242    }
243
244    // Setup control window for tmux
245    if let Err(e) = terminal::setup_tmux_control_window(&session_name, &phase_tag) {
246        println!(
247            "  {} Control window setup: {}",
248            "!".yellow(),
249            e.to_string().dimmed()
250        );
251    }
252
253    // Save session metadata
254    if let Err(e) = monitor::save_session(project_root.as_ref(), &spawn_session) {
255        println!(
256            "  {} Session metadata: {}",
257            "!".yellow(),
258            e.to_string().dimmed()
259        );
260    }
261
262    // Summary
263    println!();
264    println!(
265        "{} {} of {} agents spawned",
266        "Summary:".blue().bold(),
267        success_count,
268        ready_tasks.len()
269    );
270
271    println!();
272    println!(
273        "To attach: {}",
274        format!("tmux attach -t {}", session_name).cyan()
275    );
276    println!(
277        "To list:   {}",
278        format!("tmux list-windows -t {}", session_name).dimmed()
279    );
280
281    // Monitor takes priority over attach
282    if monitor {
283        println!();
284        println!("Starting monitor...");
285        // Small delay to let agents start
286        thread::sleep(Duration::from_secs(1));
287        return tui::run(project_root, &session_name, false); // spawn mode, not swarm
288    }
289
290    // Attach if requested
291    if attach {
292        println!();
293        println!("Attaching to session...");
294        terminal::tmux_attach(&session_name)?;
295    }
296
297    Ok(())
298}
299
300/// Run the TUI monitor for a spawn or swarm session
301pub fn run_monitor(
302    project_root: Option<PathBuf>,
303    session: Option<String>,
304    swarm_mode: bool,
305) -> Result<()> {
306    use crate::commands::swarm::session as swarm_session;
307    use colored::Colorize;
308
309    // Debug: show project root being used
310    let project_root_display = project_root
311        .as_ref()
312        .and_then(|p| p.to_str())
313        .unwrap_or("current directory");
314
315    let mode_label = if swarm_mode { "swarm" } else { "spawn" };
316    eprintln!(
317        "{} Monitor ({}) looking for sessions in: {}",
318        "DEBUG:".yellow(),
319        mode_label,
320        project_root_display
321    );
322
323    // List available sessions based on mode
324    let session_name = match session {
325        Some(s) => s,
326        None => {
327            let sessions = if swarm_mode {
328                swarm_session::list_sessions(project_root.as_ref())?
329            } else {
330                monitor::list_sessions(project_root.as_ref())?
331            };
332            eprintln!(
333                "{} Found {} {} session(s): {:?}",
334                "DEBUG:".yellow(),
335                sessions.len(),
336                mode_label,
337                sessions
338            );
339            if sessions.is_empty() {
340                let cmd = if swarm_mode { "scud swarm" } else { "scud spawn" };
341                eprintln!(
342                    "{} No {} sessions found in: {}",
343                    "DEBUG:".yellow(),
344                    mode_label,
345                    project_root_display
346                );
347                eprintln!(
348                    "{} Run: {} --project {} (if needed)",
349                    "HINT:".cyan(),
350                    cmd,
351                    project_root_display
352                );
353                anyhow::bail!("No {} sessions found. Run: {}", mode_label, cmd);
354            }
355            if sessions.len() == 1 {
356                sessions[0].clone()
357            } else {
358                println!("{}", format!("Available {} sessions:", mode_label).cyan().bold());
359                for (i, s) in sessions.iter().enumerate() {
360                    println!("  {} {}", format!("[{}]", i + 1).dimmed(), s);
361                }
362                anyhow::bail!(
363                    "Multiple {} sessions found. Specify one with --session <name>",
364                    mode_label
365                );
366            }
367        }
368    };
369
370    tui::run(project_root, &session_name, swarm_mode)
371}
372
373/// List spawn sessions
374pub fn run_sessions(project_root: Option<PathBuf>, verbose: bool) -> Result<()> {
375    use colored::Colorize;
376
377    let sessions = monitor::list_sessions(project_root.as_ref())?;
378
379    if sessions.is_empty() {
380        println!("{}", "No spawn sessions found.".dimmed());
381        println!("Run: scud spawn -m --limit 3");
382        return Ok(());
383    }
384
385    println!("{}", "Spawn Sessions:".cyan().bold());
386    println!();
387
388    for session_name in &sessions {
389        if verbose {
390            // Load full session data
391            match monitor::load_session(project_root.as_ref(), session_name) {
392                Ok(session) => {
393                    let stats = monitor::SpawnStats::from(&session);
394                    println!(
395                        "  {} {} agents ({} running, {} done)",
396                        session_name.cyan(),
397                        format!("[{}]", stats.total_agents).dimmed(),
398                        stats.running.to_string().green(),
399                        stats.completed.to_string().blue()
400                    );
401                    println!(
402                        "    {} Tag: {}, Terminal: {}",
403                        "│".dimmed(),
404                        session.tag,
405                        session.terminal
406                    );
407                    println!(
408                        "    {} Created: {}",
409                        "└".dimmed(),
410                        session.created_at.dimmed()
411                    );
412                    println!();
413                }
414                Err(_) => {
415                    println!("  {} {}", session_name, "(unable to load)".red());
416                }
417            }
418        } else {
419            println!("  {}", session_name);
420        }
421    }
422
423    if !verbose {
424        println!();
425        println!(
426            "{}",
427            "Use -v for details, or: scud monitor --session <name>".dimmed()
428        );
429    }
430
431    Ok(())
432}
433
434/// Get ready tasks for spawning
435fn get_ready_tasks<'a>(
436    all_phases: &'a std::collections::HashMap<String, crate::models::phase::Phase>,
437    all_tasks_flat: &[&Task],
438    phase_tag: &str,
439    limit: usize,
440    all_tags: bool,
441) -> Result<Vec<TaskInfo<'a>>> {
442    let mut ready_tasks: Vec<TaskInfo<'a>> = Vec::new();
443
444    if all_tags {
445        // Collect from all phases
446        for (tag, phase) in all_phases {
447            for task in &phase.tasks {
448                if is_task_ready(task, phase, all_tasks_flat) {
449                    ready_tasks.push(TaskInfo {
450                        task,
451                        tag: tag.clone(),
452                    });
453                }
454            }
455        }
456    } else {
457        // Single phase
458        let phase = all_phases
459            .get(phase_tag)
460            .ok_or_else(|| anyhow::anyhow!("Phase '{}' not found", phase_tag))?;
461
462        for task in &phase.tasks {
463            if is_task_ready(task, phase, all_tasks_flat) {
464                ready_tasks.push(TaskInfo {
465                    task,
466                    tag: phase_tag.to_string(),
467                });
468            }
469        }
470    }
471
472    // Truncate to limit
473    ready_tasks.truncate(limit);
474
475    Ok(ready_tasks)
476}
477
478/// Check if a task is ready to be spawned
479fn is_task_ready(
480    task: &Task,
481    phase: &crate::models::phase::Phase,
482    all_tasks_flat: &[&Task],
483) -> bool {
484    // Must be pending
485    if task.status != TaskStatus::Pending {
486        return false;
487    }
488
489    // Must not be expanded (we want subtasks, not parent tasks)
490    if task.is_expanded() {
491        return false;
492    }
493
494    // If it's a subtask, parent must be expanded
495    if let Some(ref parent_id) = task.parent_id {
496        let parent_expanded = phase
497            .get_task(parent_id)
498            .map(|p| p.is_expanded())
499            .unwrap_or(false);
500        if !parent_expanded {
501            return false;
502        }
503    }
504
505    // All dependencies must be met
506    task.has_dependencies_met_refs(all_tasks_flat)
507}
508
#[cfg(test)]
mod tests {
    use super::*;
    use crate::models::phase::Phase;
    use crate::models::task::Task;

    // A freshly created task (pending, no deps, no parent) is ready.
    #[test]
    fn test_is_task_ready_basic() {
        let mut phase = Phase::new("test".to_string());
        let task = Task::new("1".to_string(), "Test".to_string(), "Desc".to_string());
        phase.add_task(task);

        let all_tasks: Vec<&Task> = phase.tasks.iter().collect();
        assert!(is_task_ready(&phase.tasks[0], &phase, &all_tasks));
    }

    // A task already in progress must not be spawned again.
    #[test]
    fn test_is_task_ready_in_progress() {
        let mut phase = Phase::new("test".to_string());
        let mut task = Task::new("1".to_string(), "Test".to_string(), "Desc".to_string());
        task.set_status(TaskStatus::InProgress);
        phase.add_task(task);

        let all_tasks: Vec<&Task> = phase.tasks.iter().collect();
        assert!(!is_task_ready(&phase.tasks[0], &phase, &all_tasks));
    }

    // A task whose dependency is still pending is blocked; the dependency
    // itself (with no deps of its own) is ready.
    #[test]
    fn test_is_task_ready_blocked_by_deps() {
        let mut phase = Phase::new("test".to_string());

        let task1 = Task::new("1".to_string(), "First".to_string(), "Desc".to_string());

        let mut task2 = Task::new("2".to_string(), "Second".to_string(), "Desc".to_string());
        task2.dependencies = vec!["1".to_string()];

        phase.add_task(task1);
        phase.add_task(task2);

        let all_tasks: Vec<&Task> = phase.tasks.iter().collect();

        // Task 1 is ready (no deps)
        assert!(is_task_ready(&phase.tasks[0], &phase, &all_tasks));
        // Task 2 is NOT ready (dep not done)
        assert!(!is_task_ready(&phase.tasks[1], &phase, &all_tasks));
    }
}