mod agent;
mod checkpoint;
mod config;
mod daemon;
mod deadlock;
mod dependency_graph;
mod git;
mod hooks;
mod init;
mod orchestrator;
mod plan;
mod prompts;
mod providers;
mod state;
mod story;
mod update;
mod validator;
use clap::{Args, Parser, Subcommand};
use std::path::Path;
// Top-level CLI for the `regista` binary. NOTE: clap turns `///` doc comments
// into help text, so intentionally using `//` here to leave --help unchanged.
#[derive(Parser, Debug)]
#[command(name = "regista", version, about = "🎬 AI agent director")]
struct Cli {
    // The subcommand selected on the command line.
    #[command(subcommand)]
    command: Commands,
}
// All `regista` subcommands. Using `//` comments (not `///`) so clap's
// generated help output stays byte-identical.
#[derive(Subcommand, Debug)]
enum Commands {
    Plan(PlanArgs),         // groom a spec into stories/epics
    Auto(AutoArgs),         // plan + run pipeline in one shot
    Run(RunArgs),           // run the pipeline on existing stories
    Logs(RepoArgs),         // follow the daemon log
    Status(RepoArgs),       // report daemon status
    Kill(RepoArgs),         // stop the daemon
    Validate(ValidateArgs), // static project checks
    Init(InitArgs),         // scaffold project files
    Update(UpdateArgs),     // self-update the binary
}
// Shared positional argument: the project directory to operate on.
// Optional (num_args = 0..=1) and defaults to the current directory.
#[derive(Args, Debug)]
struct RepoArgs {
    #[arg(default_value = ".", num_args = 0..=1)]
    dir: String,
}
// Arguments for the grooming phase (plan/auto).
#[derive(Args, Debug)]
struct PlanModeArgs {
    // Path to the spec file to groom (required positional).
    spec: String,
    // Replace existing stories instead of adding to them.
    #[arg(long)]
    replace: bool,
    // Cap on generated stories; 0 appears to mean "no limit"
    // (build_daemon_args omits --max-stories when it is 0).
    #[arg(long, default_value = "0")]
    max_stories: u32,
}
// Flags shared by plan/auto/run.
#[derive(Args, Debug)]
struct CommonArgs {
    // After spawning the daemon, follow its log in the foreground.
    #[arg(long)]
    logs: bool,
    // Run inline in the foreground instead of detaching a daemon.
    // NOTE(review): handlers still invoke plan/orchestrator for real under
    // this flag — confirm where the actual "dry" semantics live.
    #[arg(long)]
    dry_run: bool,
    // Explicit config file path (forwarded to config::Config::load).
    #[arg(long)]
    config: Option<String>,
    // Override cfg.agents.provider from the command line.
    #[arg(long)]
    provider: Option<String>,
    // Restrict tracing output to error level.
    #[arg(long)]
    quiet: bool,
}
// Pipeline filtering/control flags shared by auto/run.
#[derive(Args, Debug, Default)]
struct PipelineArgs {
    // Run only this story id.
    #[arg(long)]
    story: Option<String>,
    // Run only this epic; mutually exclusive with --epics.
    #[arg(long, conflicts_with = "epics")]
    epic: Option<String>,
    // Inclusive epic range "FROM..TO"; mutually exclusive with --epic.
    #[arg(long, conflicts_with = "epic")]
    epics: Option<String>,
    // Run a single iteration and stop.
    #[arg(long)]
    once: bool,
    // Resume from the saved orchestrator checkpoint, if any.
    #[arg(long)]
    resume: bool,
    // Delete the saved checkpoint before running (handled in the parent,
    // not forwarded to the daemon child — see build_daemon_args).
    #[arg(long)]
    clean_state: bool,
}
// Hidden flags used only when the binary re-spawns itself as a daemon
// child (see build_daemon_args, which injects --daemon/--log-file).
#[derive(Args, Debug)]
struct DaemonArgs {
    // Set on the detached child; switches handlers into inline daemon mode.
    #[arg(long, hide = true)]
    daemon: bool,
    // Log file the daemon child should write tracing output to.
    #[arg(long, hide = true)]
    log_file: Option<String>,
}
// `regista plan`: grooming args + repo dir + shared flags + hidden daemon flags.
#[derive(Args, Debug)]
struct PlanArgs {
    #[command(flatten)]
    plan_mode: PlanModeArgs,
    #[command(flatten)]
    repo: RepoArgs,
    #[command(flatten)]
    common: CommonArgs,
    #[command(flatten)]
    daemon: DaemonArgs,
}
// `regista auto`: everything `plan` takes plus pipeline filters.
#[derive(Args, Debug)]
struct AutoArgs {
    #[command(flatten)]
    plan_mode: PlanModeArgs,
    #[command(flatten)]
    repo: RepoArgs,
    #[command(flatten)]
    pipeline: PipelineArgs,
    #[command(flatten)]
    common: CommonArgs,
    #[command(flatten)]
    daemon: DaemonArgs,
}
// `regista run`: pipeline-only execution (no grooming spec).
#[derive(Args, Debug)]
struct RunArgs {
    #[command(flatten)]
    repo: RepoArgs,
    #[command(flatten)]
    pipeline: PipelineArgs,
    #[command(flatten)]
    common: CommonArgs,
    #[command(flatten)]
    daemon: DaemonArgs,
}
// `regista validate`: static checks, optionally emitting JSON.
#[derive(Args, Debug)]
struct ValidateArgs {
    #[command(flatten)]
    repo: RepoArgs,
    // Emit findings as pretty-printed JSON instead of a human list.
    #[arg(long)]
    json: bool,
    // Explicit config file path.
    #[arg(long)]
    config: Option<String>,
    // NOTE(review): accepted but never read by handle_validate — confirm
    // whether provider override is meant to affect validation.
    #[arg(long)]
    provider: Option<String>,
}
// `regista init`: project scaffolding options.
#[derive(Args, Debug)]
struct InitArgs {
    #[command(flatten)]
    repo: RepoArgs,
    // Minimal scaffold variant.
    #[arg(long)]
    light: bool,
    // Also generate example content.
    #[arg(long)]
    with_example: bool,
    // Agent provider baked into the generated config; defaults to "pi".
    #[arg(long, default_value = "pi")]
    provider: String,
}
// `regista update`: self-update; --yes skips confirmation (forwarded to update::run).
#[derive(Args, Debug)]
struct UpdateArgs {
    #[arg(long)]
    yes: bool,
}
fn main() {
let cli = Cli::parse();
match cli.command {
Commands::Plan(args) => handle_plan(args),
Commands::Auto(args) => handle_auto(args),
Commands::Run(args) => handle_run(args),
Commands::Logs(args) => handle_logs(args),
Commands::Status(args) => handle_status(args),
Commands::Kill(args) => handle_kill(args),
Commands::Validate(args) => handle_validate(args),
Commands::Init(args) => handle_init(args),
Commands::Update(args) => handle_update(args),
}
}
// Handles `regista plan`: groom a spec file into stories/epics.
//
// Three execution modes, checked in order:
//   1. --daemon (hidden): we ARE the detached child — run the groom inline
//      with tracing output. Exit 2 on a dirty dependency graph, 1 on failure.
//   2. --dry-run: run the groom inline in the foreground and print a summary.
//      NOTE(review): this still calls plan::run; presumably the "dry"
//      behavior lives inside plan/config — confirm.
//   3. default: re-spawn this binary as a daemon child and optionally follow
//      its log when --logs was given.
fn handle_plan(args: PlanArgs) {
    let project_root = Path::new(&args.repo.dir);
    if args.daemon.daemon {
        // Daemon child: route tracing to the file passed via --log-file.
        setup_daemon_tracing(args.daemon.log_file.as_deref(), args.common.quiet);
        // Removes the PID file when this scope ends, even on early exit paths
        // that return (process::exit skips Drop).
        let _cleanup = daemon::PidCleanup(project_root.to_path_buf());
        let cfg = load_config(
            project_root,
            args.common.config.as_deref(),
            args.common.provider.as_deref(),
        );
        match plan::run(
            project_root,
            Path::new(&args.plan_mode.spec),
            &cfg,
            args.plan_mode.max_stories,
            args.plan_mode.replace,
        ) {
            Ok(result) => {
                tracing::info!(
                    "Groom completado: {} historias, {} épicas, {} iteraciones. Dependencias: {}",
                    result.stories_created,
                    result.epics_created,
                    result.iterations,
                    if result.dependencies_clean {
                        "limpias"
                    } else {
                        "con errores"
                    }
                );
                // Dirty dependency graph gets its own exit code.
                if !result.dependencies_clean {
                    std::process::exit(2);
                }
            }
            Err(e) => {
                tracing::error!("Groom falló: {e}");
                std::process::exit(1);
            }
        }
        return;
    }
    if args.common.dry_run {
        // Foreground run: human-oriented output.
        setup_user_tracing(args.common.quiet, false, None);
        let cfg = load_config(
            project_root,
            args.common.config.as_deref(),
            args.common.provider.as_deref(),
        );
        match plan::run(
            project_root,
            Path::new(&args.plan_mode.spec),
            &cfg,
            args.plan_mode.max_stories,
            args.plan_mode.replace,
        ) {
            Ok(result) => {
                println!("✅ Groom completado en {} iteraciones.", result.iterations);
                println!(" Historias generadas: {}", result.stories_created);
                println!(" Épicas generadas: {}", result.epics_created);
                if result.dependencies_clean {
                    println!(" Grafo de dependencias: limpio ✅");
                } else {
                    println!(" Grafo de dependencias: con errores ⚠️");
                }
            }
            Err(e) => {
                eprintln!("❌ Groom falló: {e}");
                std::process::exit(1);
            }
        }
        return;
    }
    // Default: detach a daemon child that re-enters this handler with --daemon.
    // Plan has no pipeline phase, hence the default PipelineArgs.
    let child_args = build_daemon_args(
        "plan",
        &args.repo.dir,
        Some(&args.plan_mode.spec),
        args.plan_mode.replace,
        args.plan_mode.max_stories,
        &PipelineArgs::default(),
        &args.common,
    );
    spawn_and_optionally_follow(project_root, &child_args, args.common.logs);
}
// Handles `regista auto`: groom the spec, then run the pipeline in one shot.
//
// Modes, checked in order:
//   1. --clean-state (only in the non-daemon parent): drop the checkpoint.
//   2. --daemon (hidden): detached child — groom, then orchestrate.
//      Exit codes: 2 = dirty dependency graph, 1 = hard failure, otherwise
//      exit_code_from_report().
//   3. --dry-run: groom + pipeline inline in the foreground.
//      NOTE(review): the daemon path exits(2) on a dirty graph while the
//      dry-run path returns 0, and the zero-stories/dirty-deps checks are
//      ordered differently between the two — confirm intended.
//   4. default: re-spawn as a daemon, optionally following its log.
fn handle_auto(args: AutoArgs) {
    let project_root = Path::new(&args.repo.dir);
    // Checkpoint removal happens in the parent, before any daemon respawn.
    if args.pipeline.clean_state && !args.daemon.daemon {
        checkpoint::OrchestratorState::remove(project_root);
        println!("✅ Checkpoint eliminado.");
    }
    if args.daemon.daemon {
        setup_daemon_tracing(args.daemon.log_file.as_deref(), args.common.quiet);
        // Drops (and removes the PID file) on the plain `return` below.
        let _cleanup = daemon::PidCleanup(project_root.to_path_buf());
        let cfg = load_config(
            project_root,
            args.common.config.as_deref(),
            args.common.provider.as_deref(),
        );
        // Phase 1: groom.
        match plan::run(
            project_root,
            Path::new(&args.plan_mode.spec),
            &cfg,
            args.plan_mode.max_stories,
            args.plan_mode.replace,
        ) {
            Ok(plan_result) => {
                tracing::info!(
                    "Groom completado: {} historias, {} épicas, deps={}",
                    plan_result.stories_created,
                    plan_result.epics_created,
                    if plan_result.dependencies_clean {
                        "limpias"
                    } else {
                        "con errores"
                    }
                );
                if plan_result.stories_created == 0 {
                    tracing::warn!("No se generaron historias. Omitiendo pipeline.");
                    return;
                }
                if !plan_result.dependencies_clean {
                    tracing::warn!("Grafo de dependencias con errores. Omitiendo pipeline.");
                    std::process::exit(2);
                }
            }
            Err(e) => {
                tracing::error!("Groom falló: {e}");
                std::process::exit(1);
            }
        }
        // Phase 2: pipeline. --resume reloads the previous checkpoint, if any.
        let run_options = build_run_options(&args.pipeline, args.common.quiet);
        let resume_state = if args.pipeline.resume {
            checkpoint::OrchestratorState::load(project_root)
        } else {
            None
        };
        match orchestrator::run(project_root, &cfg, &run_options, resume_state) {
            Ok(report) => {
                tracing::info!(
                    "Pipeline completado: {} total, {} done, {} failed, {} iteraciones, {}s",
                    report.total,
                    report.done,
                    report.failed,
                    report.iterations,
                    report.elapsed.as_secs()
                );
                std::process::exit(exit_code_from_report(&report));
            }
            Err(e) => {
                tracing::error!("Pipeline falló: {e}");
                std::process::exit(1);
            }
        }
    }
    if args.common.dry_run {
        setup_user_tracing(args.common.quiet, false, None);
        let cfg = load_config(
            project_root,
            args.common.config.as_deref(),
            args.common.provider.as_deref(),
        );
        match plan::run(
            project_root,
            Path::new(&args.plan_mode.spec),
            &cfg,
            args.plan_mode.max_stories,
            args.plan_mode.replace,
        ) {
            Ok(plan_result) => {
                println!(
                    "✅ Groom: {} historias, {} épicas",
                    plan_result.stories_created, plan_result.epics_created
                );
                if !plan_result.dependencies_clean {
                    println!("⚠️ Dependencias con errores. Omitiendo pipeline.");
                    return;
                }
                if plan_result.stories_created == 0 {
                    println!("⚠️ Sin historias. Omitiendo pipeline.");
                    return;
                }
            }
            Err(e) => {
                eprintln!("❌ Groom falló: {e}");
                std::process::exit(1);
            }
        }
        // NOTE(review): build_run_options hardcodes RunOptions.dry_run = false,
        // so this invokes the orchestrator for real — confirm intended.
        let run_options = build_run_options(&args.pipeline, args.common.quiet);
        match orchestrator::run(project_root, &cfg, &run_options, None) {
            Ok(report) => print_pipeline_summary(&report),
            Err(e) => {
                eprintln!("❌ Pipeline falló: {e}");
                std::process::exit(1);
            }
        }
        return;
    }
    // Default: detach a daemon child that re-enters this handler with --daemon.
    let child_args = build_daemon_args(
        "auto",
        &args.repo.dir,
        Some(&args.plan_mode.spec),
        args.plan_mode.replace,
        args.plan_mode.max_stories,
        &args.pipeline,
        &args.common,
    );
    spawn_and_optionally_follow(project_root, &child_args, args.common.logs);
}
// Handles `regista run`: execute the pipeline on existing stories (no groom).
//
// Modes, checked in order:
//   1. --clean-state (non-daemon parent): drop the saved checkpoint.
//   2. --daemon (hidden): detached child — run the orchestrator and exit with
//      exit_code_from_report(), or 1 on a hard failure.
//   3. --dry-run: run inline in the foreground and print a summary.
//      NOTE(review): build_run_options hardcodes RunOptions.dry_run = false,
//      so this appears to execute for real — confirm intended.
//   4. default: re-spawn as a daemon (spec-less: spec=None, replace=false,
//      max_stories=0), optionally following its log.
fn handle_run(args: RunArgs) {
    let project_root = Path::new(&args.repo.dir);
    // Checkpoint removal happens in the parent, before any daemon respawn.
    if args.pipeline.clean_state && !args.daemon.daemon {
        checkpoint::OrchestratorState::remove(project_root);
        println!("✅ Checkpoint eliminado.");
    }
    if args.daemon.daemon {
        setup_daemon_tracing(args.daemon.log_file.as_deref(), args.common.quiet);
        // Removes the PID file when dropped (process::exit skips Drop).
        let _cleanup = daemon::PidCleanup(project_root.to_path_buf());
        let cfg = load_config(
            project_root,
            args.common.config.as_deref(),
            args.common.provider.as_deref(),
        );
        let run_options = build_run_options(&args.pipeline, args.common.quiet);
        // --resume reloads the previous checkpoint, if any.
        let resume_state = if args.pipeline.resume {
            checkpoint::OrchestratorState::load(project_root)
        } else {
            None
        };
        match orchestrator::run(project_root, &cfg, &run_options, resume_state) {
            Ok(report) => {
                tracing::info!(
                    "Pipeline completado: {} total, {} done, {} failed, {} iteraciones, {}s",
                    report.total,
                    report.done,
                    report.failed,
                    report.iterations,
                    report.elapsed.as_secs()
                );
                std::process::exit(exit_code_from_report(&report));
            }
            Err(e) => {
                tracing::error!("Pipeline falló: {e}");
                std::process::exit(1);
            }
        }
    }
    if args.common.dry_run {
        setup_user_tracing(args.common.quiet, false, None);
        let cfg = load_config(
            project_root,
            args.common.config.as_deref(),
            args.common.provider.as_deref(),
        );
        let run_options = build_run_options(&args.pipeline, args.common.quiet);
        match orchestrator::run(project_root, &cfg, &run_options, None) {
            Ok(report) => print_pipeline_summary(&report),
            Err(e) => {
                eprintln!("❌ Pipeline falló: {e}");
                std::process::exit(1);
            }
        }
        return;
    }
    // Default: detach a daemon child that re-enters this handler with --daemon.
    let child_args = build_daemon_args(
        "run",
        &args.repo.dir,
        None,
        false,
        0,
        &args.pipeline,
        &args.common,
    );
    spawn_and_optionally_follow(project_root, &child_args, args.common.logs);
}
/// `regista logs` — tail the daemon log for the given project directory;
/// exits 1 if following fails (e.g. no daemon/log present).
fn handle_logs(args: RepoArgs) {
    let root = Path::new(&args.dir);
    match daemon::follow(root) {
        Ok(_) => {}
        Err(e) => {
            eprintln!("Error: {e}");
            std::process::exit(1);
        }
    }
}
/// `regista status` — print the daemon status line; exits 1 on failure.
fn handle_status(args: RepoArgs) {
    let root = Path::new(&args.dir);
    let outcome = daemon::status(root);
    match outcome {
        Err(e) => {
            eprintln!("Error: {e}");
            std::process::exit(1);
        }
        Ok(msg) => println!("{msg}"),
    }
}
/// `regista kill` — terminate the running daemon; exits 1 on failure.
fn handle_kill(args: RepoArgs) {
    let root = Path::new(&args.dir);
    let outcome = daemon::kill(root);
    match outcome {
        Err(e) => {
            eprintln!("Error: {e}");
            std::process::exit(1);
        }
        Ok(msg) => println!("{msg}"),
    }
}
/// `regista validate` — run static project checks and report findings,
/// either as pretty JSON (--json) or as a human-readable list.
/// Exit codes: 1 if any errors, 2 if only warnings, 0 when clean.
fn handle_validate(args: ValidateArgs) {
    let project_root = Path::new(&args.repo.dir);
    let config_path = args.config.as_deref().map(Path::new);
    let result = validator::validate(project_root, config_path);
    if args.json {
        // Serialization failure degrades to an empty JSON object.
        let payload = serde_json::to_string_pretty(&result).unwrap_or_else(|_| "{}".into());
        println!("{payload}");
    } else if result.findings.is_empty() {
        println!("✅ Todo OK — el proyecto está listo para ejecutar el pipeline.");
    } else {
        for finding in &result.findings {
            let icon = match finding.severity {
                validator::Severity::Error => "❌",
                validator::Severity::Warning => "⚠️",
            };
            // Optional " [STORY-ID]" suffix when the finding is story-scoped.
            let story = match finding.story_id.as_deref() {
                Some(id) => format!(" [{id}]"),
                None => String::new(),
            };
            println!("{icon} [{}]{} {}", finding.category, story, finding.message);
        }
        println!(
            "\nResultado: {} errores, {} warnings",
            result.errors, result.warnings
        );
    }
    // Errors dominate warnings for the exit status.
    if result.errors > 0 {
        std::process::exit(1);
    }
    if result.warnings > 0 {
        std::process::exit(2);
    }
}
/// `regista init` — scaffold project files, then report what was created,
/// skipped, or errored. Exits 1 when init fails or any file errored.
fn handle_init(args: InitArgs) {
    let project_root = Path::new(&args.repo.dir);
    // Hard failure of the scaffolder itself short-circuits everything.
    let result = match init::init(project_root, args.light, args.with_example, &args.provider) {
        Ok(r) => r,
        Err(e) => {
            eprintln!("Error inicializando proyecto: {e}");
            std::process::exit(1);
        }
    };
    if !result.created.is_empty() {
        println!("Creados:");
        for p in &result.created {
            println!(" ✅ {p}");
        }
    }
    if !result.skipped.is_empty() {
        println!("Saltados (ya existen):");
        for p in &result.skipped {
            println!(" ⏭️ {p}");
        }
    }
    // Per-file errors: list them and exit non-zero.
    if !result.errors.is_empty() {
        println!("Errores:");
        for e in &result.errors {
            eprintln!(" ❌ {e}");
        }
        std::process::exit(1);
    }
    if result.created.is_empty() && result.skipped.is_empty() {
        println!("Nada que hacer.");
    } else {
        println!("\n✅ Proyecto inicializado en {}", project_root.display());
    }
}
/// `regista update` — run the self-updater; exits 1 on failure.
fn handle_update(args: UpdateArgs) {
    if let Err(e) = update::run(args.yes) {
        eprintln!("{e}");
        std::process::exit(1);
    }
}
fn setup_daemon_tracing(log_file: Option<&str>, quiet: bool) {
let env_filter = if quiet {
tracing_subscriber::EnvFilter::new("error")
} else {
tracing_subscriber::EnvFilter::try_from_default_env()
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("info"))
};
let subscriber = tracing_subscriber::fmt().with_env_filter(env_filter);
if let Some(path) = log_file {
let file = std::fs::OpenOptions::new()
.create(true)
.append(true)
.open(path)
.expect("No se pudo crear/abrir el archivo de log del daemon");
subscriber.with_writer(std::sync::Mutex::new(file)).init();
} else {
subscriber.with_writer(std::io::stderr).init();
}
}
fn setup_user_tracing(quiet: bool, _json: bool, _log_file: Option<&str>) {
let env_filter = if quiet {
tracing_subscriber::EnvFilter::new("error")
} else {
tracing_subscriber::EnvFilter::try_from_default_env()
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("info"))
};
tracing_subscriber::fmt()
.with_env_filter(env_filter)
.with_writer(std::io::stderr)
.init();
}
/// Load the project configuration, applying an optional provider override.
/// Exits the process with code 1 (after logging) if loading fails.
fn load_config(
    project_root: &Path,
    config_path: Option<&str>,
    provider_override: Option<&str>,
) -> config::Config {
    let explicit_path = config_path.map(Path::new);
    // process::exit returns `!`, so it coerces inside unwrap_or_else.
    let mut cfg = config::Config::load(project_root, explicit_path).unwrap_or_else(|e| {
        tracing::error!("Error al cargar configuración: {e}");
        std::process::exit(1);
    });
    if let Some(provider) = provider_override {
        cfg.agents.provider = provider.to_string();
        tracing::info!("Provider override: {provider}");
    }
    cfg
}
/// Translate CLI pipeline flags into orchestrator run options.
///
/// `--epics "FROM..TO"` becomes an uppercase `(start, end)` pair; any value
/// that does not split into exactly two parts on ".." is warned about and
/// ignored. NOTE(review): `dry_run` is always false here — the CLI --dry-run
/// flag only selects foreground execution in the handlers; confirm intended.
fn build_run_options(pipeline: &PipelineArgs, quiet: bool) -> orchestrator::RunOptions {
    let epics_range = match pipeline.epics.as_deref() {
        None => None,
        Some(range) => {
            let parts: Vec<&str> = range.split("..").collect();
            match parts.as_slice() {
                [start, end] => Some((start.trim().to_uppercase(), end.trim().to_uppercase())),
                _ => {
                    tracing::warn!("Formato de rango de épicas inválido: '{range}'. Ignorando.");
                    None
                }
            }
        }
    };
    orchestrator::RunOptions {
        once: pipeline.once,
        story_filter: pipeline.story.clone(),
        epic_filter: pipeline.epic.clone(),
        epics_range,
        dry_run: false,
        quiet,
    }
}
fn build_daemon_args(
subcommand: &str,
dir: &str,
spec: Option<&str>,
replace: bool,
max_stories: u32,
pipeline: &PipelineArgs,
common: &CommonArgs,
) -> Vec<String> {
let log_path = format!("{dir}/.regista/daemon.log");
let mut args = vec![subcommand.to_string()];
if let Some(s) = spec {
args.push(s.to_string());
}
args.push(dir.to_string());
args.push("--daemon".to_string());
args.push("--log-file".to_string());
args.push(log_path);
if replace {
args.push("--replace".to_string());
}
if max_stories > 0 {
args.push("--max-stories".to_string());
args.push(max_stories.to_string());
}
if let Some(ref s) = pipeline.story {
args.push("--story".to_string());
args.push(s.clone());
}
if let Some(ref e) = pipeline.epic {
args.push("--epic".to_string());
args.push(e.clone());
}
if let Some(ref e) = pipeline.epics {
args.push("--epics".to_string());
args.push(e.clone());
}
if pipeline.once {
args.push("--once".to_string());
}
if pipeline.resume {
args.push("--resume".to_string());
}
if let Some(ref c) = common.config {
args.push("--config".to_string());
args.push(c.clone());
}
if let Some(ref p) = common.provider {
args.push("--provider".to_string());
args.push(p.clone());
}
if common.quiet {
args.push("--quiet".to_string());
}
args
}
/// Detach a daemon child with the given argv, print where its log lives,
/// and (when `follow_log`) tail that log in the foreground.
/// Exits 1 if the daemon cannot be launched.
fn spawn_and_optionally_follow(project_root: &Path, child_args: &[String], follow_log: bool) {
    let pid = match daemon::detach(project_root, child_args, None) {
        Ok(pid) => pid,
        Err(e) => {
            eprintln!("Error al lanzar el daemon: {e}");
            std::process::exit(1);
        }
    };
    let log_display = format!("{}/.regista/daemon.log", project_root.display());
    println!("🚀 Daemon lanzado (PID: {pid})");
    println!(" Log: {log_display}");
    println!(" Usa: regista logs, regista status, regista kill");
    if !follow_log {
        return;
    }
    // Following is best-effort: a failure here is reported but not fatal.
    if let Err(e) = daemon::follow(project_root) {
        eprintln!("Error siguiendo el log: {e}");
    }
}
/// Print the human-readable end-of-run summary for a pipeline report.
fn print_pipeline_summary(report: &orchestrator::RunReport) {
    // Header line depends on whether the run stopped early.
    match report.stop_reason {
        Some(ref reason) => println!("\n⚠️ Pipeline detenido: {reason}"),
        None => println!("\n🏁 Pipeline completado"),
    }
    println!(" Total: {:>4}", report.total);
    println!(" Done: {:>4}", report.done);
    println!(" Failed: {:>4}", report.failed);
    println!(" Blocked: {:>4}", report.blocked);
    println!(" Draft: {:>4}", report.draft);
    println!(" Iteraciones: {:>2}", report.iterations);
    println!(" Tiempo: {:>3}s", report.elapsed.as_secs());
}
/// Map a pipeline report to the process exit code:
/// 3 = stopped early (stop_reason set), 2 = some stories failed, 0 = success.
fn exit_code_from_report(report: &orchestrator::RunReport) -> i32 {
    if report.stop_reason.is_some() {
        return 3;
    }
    if report.failed > 0 { 2 } else { 0 }
}
// Unit tests: CLI parsing via `Cli::try_parse_from` plus the pure helpers
// (`build_daemon_args`, `exit_code_from_report`). No daemon is spawned and
// no handler runs here.
#[cfg(test)]
mod tests {
    use super::*;

    // Bare `run` falls back to the current directory with all flags off.
    #[test]
    fn run_defaults() {
        let args = Cli::try_parse_from(["regista", "run"]).unwrap();
        match args.command {
            Commands::Run(r) => {
                assert_eq!(r.repo.dir, ".");
                assert!(!r.common.dry_run);
                assert!(!r.common.logs);
                assert!(r.pipeline.story.is_none());
                assert!(!r.pipeline.once);
            }
            _ => panic!("expected Run"),
        }
    }

    // Positional dir plus story filter and boolean flags parse together.
    #[test]
    fn run_with_filters() {
        let args = Cli::try_parse_from([
            "regista",
            "run",
            "/tmp/proj",
            "--story",
            "STORY-001",
            "--once",
            "--dry-run",
        ])
        .unwrap();
        match args.command {
            Commands::Run(r) => {
                assert_eq!(r.repo.dir, "/tmp/proj");
                assert_eq!(r.pipeline.story.unwrap(), "STORY-001");
                assert!(r.pipeline.once);
                assert!(r.common.dry_run);
            }
            _ => panic!("expected Run"),
        }
    }

    // --epic and --epics are declared conflicts_with each other.
    #[test]
    fn run_epic_conflicts_with_epics() {
        let err = Cli::try_parse_from([
            "regista",
            "run",
            ".",
            "--epic",
            "EPIC-001",
            "--epics",
            "EPIC-001..EPIC-003",
        ])
        .unwrap_err();
        let msg = err.to_string();
        assert!(
            msg.contains("--epic") || msg.contains("--epics"),
            "expected conflict, got: {msg}"
        );
    }

    // `plan <spec>` with defaults: dir ".", no replace, max_stories 0.
    #[test]
    fn plan_basic() {
        let args = Cli::try_parse_from(["regista", "plan", "spec.md"]).unwrap();
        match args.command {
            Commands::Plan(p) => {
                assert_eq!(p.plan_mode.spec, "spec.md");
                assert_eq!(p.repo.dir, ".");
                assert!(!p.plan_mode.replace);
                assert_eq!(p.plan_mode.max_stories, 0);
            }
            _ => panic!("expected Plan"),
        }
    }

    // Grooming flags --replace and --max-stories parse on plan.
    #[test]
    fn plan_with_replace_and_limit() {
        let args = Cli::try_parse_from([
            "regista",
            "plan",
            "docs/spec.md",
            "--replace",
            "--max-stories",
            "15",
        ])
        .unwrap();
        match args.command {
            Commands::Plan(p) => {
                assert_eq!(p.plan_mode.spec, "docs/spec.md");
                assert!(p.plan_mode.replace);
                assert_eq!(p.plan_mode.max_stories, 15);
            }
            _ => panic!("expected Plan"),
        }
    }

    // --logs is accepted on plan (follow the daemon log after spawn).
    #[test]
    fn plan_with_logs() {
        let args = Cli::try_parse_from(["regista", "plan", "spec.md", "--logs"]).unwrap();
        match args.command {
            Commands::Plan(p) => {
                assert!(p.common.logs);
            }
            _ => panic!("expected Plan"),
        }
    }

    // auto accepts grooming flags and pipeline filters simultaneously.
    #[test]
    fn auto_full() {
        let args = Cli::try_parse_from([
            "regista",
            "auto",
            "spec.md",
            "--replace",
            "--max-stories",
            "20",
            "--epic",
            "EPIC-001",
            "--once",
            "--logs",
        ])
        .unwrap();
        match args.command {
            Commands::Auto(a) => {
                assert_eq!(a.plan_mode.spec, "spec.md");
                assert!(a.plan_mode.replace);
                assert_eq!(a.plan_mode.max_stories, 20);
                assert_eq!(a.pipeline.epic.unwrap(), "EPIC-001");
                assert!(a.pipeline.once);
                assert!(a.common.logs);
            }
            _ => panic!("expected Auto"),
        }
    }

    // logs takes an explicit dir positional.
    #[test]
    fn logs_subcommand() {
        let args = Cli::try_parse_from(["regista", "logs", "/tmp/proj"]).unwrap();
        match args.command {
            Commands::Logs(l) => assert_eq!(l.dir, "/tmp/proj"),
            _ => panic!("expected Logs"),
        }
    }

    // logs with no positional defaults to ".".
    #[test]
    fn logs_default_dir() {
        let args = Cli::try_parse_from(["regista", "logs"]).unwrap();
        match args.command {
            Commands::Logs(l) => assert_eq!(l.dir, "."),
            _ => panic!("expected Logs"),
        }
    }

    #[test]
    fn status_subcommand() {
        let args = Cli::try_parse_from(["regista", "status"]).unwrap();
        assert!(matches!(args.command, Commands::Status(_)));
    }

    #[test]
    fn kill_subcommand() {
        let args = Cli::try_parse_from(["regista", "kill", "."]).unwrap();
        match args.command {
            Commands::Kill(k) => assert_eq!(k.dir, "."),
            _ => panic!("expected Kill"),
        }
    }

    #[test]
    fn validate_subcommand() {
        let args = Cli::try_parse_from(["regista", "validate", ".", "--json"]).unwrap();
        match args.command {
            Commands::Validate(v) => {
                assert_eq!(v.repo.dir, ".");
                assert!(v.json);
            }
            _ => panic!("expected Validate"),
        }
    }

    // init flags parse; --provider overrides the "pi" default.
    #[test]
    fn init_subcommand() {
        let args = Cli::try_parse_from(["regista", "init", ".", "--light", "--provider", "claude"])
            .unwrap();
        match args.command {
            Commands::Init(i) => {
                assert_eq!(i.repo.dir, ".");
                assert!(i.light);
                assert!(!i.with_example);
                assert_eq!(i.provider, "claude");
            }
            _ => panic!("expected Init"),
        }
    }

    #[test]
    fn init_with_example() {
        let args = Cli::try_parse_from([
            "regista",
            "init",
            "/tmp/newproj",
            "--with-example",
            "--provider",
            "codex",
        ])
        .unwrap();
        match args.command {
            Commands::Init(i) => {
                assert_eq!(i.repo.dir, "/tmp/newproj");
                assert!(i.with_example);
                assert!(!i.light);
                assert_eq!(i.provider, "codex");
            }
            _ => panic!("expected Init"),
        }
    }

    // run has no spec positional, so argv is [subcommand, dir, ...];
    // --clean-state must never be forwarded to the daemon child.
    #[test]
    fn build_daemon_args_for_run() {
        let pipeline = PipelineArgs {
            story: Some("STORY-005".into()),
            epic: None,
            epics: None,
            once: true,
            resume: false,
            clean_state: false,
        };
        let common = CommonArgs {
            logs: false,
            dry_run: false,
            config: None,
            provider: Some("claude".into()),
            quiet: false,
        };
        let args = build_daemon_args("run", ".", None, false, 0, &pipeline, &common);
        assert_eq!(args[0], "run");
        assert_eq!(args[1], ".");
        assert!(args.contains(&"--daemon".to_string()));
        assert!(args.contains(&"--story".to_string()));
        assert!(args.contains(&"STORY-005".to_string()));
        assert!(args.contains(&"--once".to_string()));
        assert!(args.contains(&"--provider".to_string()));
        assert!(args.contains(&"claude".to_string()));
        assert!(!args.contains(&"--clean-state".to_string()));
    }

    // plan puts the spec positional before the dir; --logs is a parent-only
    // flag and must not be forwarded.
    #[test]
    fn build_daemon_args_for_plan() {
        let pipeline = PipelineArgs::default();
        let common = CommonArgs {
            logs: true,
            dry_run: false,
            config: Some("custom.toml".into()),
            provider: None,
            quiet: true,
        };
        let args = build_daemon_args(
            "plan",
            "myproj",
            Some("spec.md"),
            true,
            10,
            &pipeline,
            &common,
        );
        assert_eq!(args[0], "plan");
        assert_eq!(args[1], "spec.md");
        assert_eq!(args[2], "myproj");
        assert!(args.contains(&"--replace".to_string()));
        assert!(args.contains(&"10".to_string()));
        assert!(args.contains(&"--config".to_string()));
        assert!(args.contains(&"custom.toml".to_string()));
        assert!(args.contains(&"--quiet".to_string()));
        assert!(!args.contains(&"--logs".to_string()));
    }

    // Exit-code mapping: clean run → 0.
    #[test]
    fn exit_code_all_done_is_zero() {
        let report = orchestrator::RunReport {
            total: 5,
            done: 5,
            failed: 0,
            blocked: 0,
            draft: 0,
            iterations: 3,
            elapsed: std::time::Duration::from_secs(30),
            elapsed_seconds: 30,
            stop_reason: None,
            stories: vec![],
        };
        assert_eq!(exit_code_from_report(&report), 0);
    }

    // Exit-code mapping: any failed story → 2.
    #[test]
    fn exit_code_with_failures_is_2() {
        let report = orchestrator::RunReport {
            total: 5,
            done: 3,
            failed: 2,
            blocked: 0,
            draft: 0,
            iterations: 5,
            elapsed: std::time::Duration::from_secs(60),
            elapsed_seconds: 60,
            stop_reason: None,
            stories: vec![],
        };
        assert_eq!(exit_code_from_report(&report), 2);
    }

    // Exit-code mapping: stop_reason wins over everything → 3.
    #[test]
    fn exit_code_stopped_early_is_3() {
        let report = orchestrator::RunReport {
            total: 10,
            done: 2,
            failed: 0,
            blocked: 5,
            draft: 3,
            iterations: 50,
            elapsed: std::time::Duration::from_secs(600),
            elapsed_seconds: 600,
            stop_reason: Some("max_iterations".into()),
            stories: vec![],
        };
        assert_eq!(exit_code_from_report(&report), 3);
    }
}