use std::path::PathBuf;
use std::process::Command;
/// Locate the `agent-exec` binary built alongside the test executable.
///
/// Cargo places test executables in `target/<profile>/deps/` while the
/// binary under test lives one level up in `target/<profile>/`; on Windows
/// it carries an `.exe` extension.
fn binary() -> PathBuf {
    let exe = std::env::current_exe().expect("current exe");
    let mut dir = exe.parent().expect("exe has a parent dir").to_path_buf();
    if dir.ends_with("deps") {
        dir.pop();
    }
    let name = if cfg!(windows) {
        "agent-exec.exe"
    } else {
        "agent-exec"
    };
    dir.join(name)
}
/// Per-test fixture: an isolated temporary job root for the binary under test.
struct TestHarness {
    // Keeps the temporary directory alive for the harness's lifetime;
    // dropping it deletes the directory and everything the jobs wrote.
    _tmp: tempfile::TempDir,
    // UTF-8 path of the tempdir, passed to the binary via AGENT_EXEC_ROOT.
    root: String,
}
impl TestHarness {
fn new() -> Self {
let tmp = tempfile::tempdir().expect("create tempdir");
let root = tmp
.path()
.to_str()
.expect("tempdir path is valid UTF-8")
.to_string();
Self { _tmp: tmp, root }
}
fn root(&self) -> &str {
&self.root
}
fn run(&self, args: &[&str]) -> serde_json::Value {
run_cmd_with_root(args, Some(&self.root))
}
}
/// Run the binary with `args`, optionally setting AGENT_EXEC_ROOT, and parse
/// its (non-empty) stdout as a single JSON value. Panics with full context
/// (stdout, stderr, args) on any failure.
fn run_cmd_with_root(args: &[&str], root: Option<&str>) -> serde_json::Value {
    let mut cmd = Command::new(binary());
    cmd.args(args);
    if let Some(dir) = root {
        cmd.env("AGENT_EXEC_ROOT", dir);
    }
    let out = cmd.output().expect("run binary");
    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let body = stdout.trim();
    assert!(
        !body.is_empty(),
        "stdout is empty (stderr: {stderr})\nargs: {args:?}"
    );
    match serde_json::from_str(body) {
        Ok(v) => v,
        Err(e) => panic!(
            "stdout is not valid JSON: {e}\nstdout: {stdout}\nstderr: {stderr}\nargs: {args:?}"
        ),
    }
}
/// Run the binary expecting a clap usage error: exit code 2 and empty stdout
/// (the JSON envelope must never be emitted for usage errors).
fn assert_usage_error(args: &[&str], root: Option<&str>) {
    let mut cmd = Command::new(binary());
    cmd.args(args);
    if let Some(dir) = root {
        cmd.env("AGENT_EXEC_ROOT", dir);
    }
    let out = cmd.output().expect("run binary");
    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    assert_eq!(
        out.status.code(),
        Some(2),
        "expected exit code 2 (usage error)\nstdout: {stdout}\nstderr: {stderr}\nargs: {args:?}"
    );
    assert!(
        stdout.trim().is_empty(),
        "expected empty stdout for usage error\nstdout: {stdout}\nstderr: {stderr}\nargs: {args:?}"
    );
}
/// Run the binary with a leading global `--root <root>` flag (and the
/// AGENT_EXEC_ROOT env var removed) and parse stdout as JSON.
fn run_cmd_with_global_root_flag(root: &str, args: &[&str]) -> serde_json::Value {
    let mut cmd = Command::new(binary());
    cmd.arg("--root").arg(root).args(args);
    cmd.env_remove("AGENT_EXEC_ROOT");
    let out = cmd.output().expect("run binary");
    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let body = stdout.trim();
    assert!(
        !body.is_empty(),
        "stdout is empty (stderr: {stderr})\nargs: {args:?}"
    );
    match serde_json::from_str(body) {
        Ok(v) => v,
        Err(e) => panic!(
            "stdout is not valid JSON: {e}\nstdout: {stdout}\nstderr: {stderr}\nargs: {args:?}"
        ),
    }
}
/// Run the binary with `--root <root>` placed AFTER the subcommand (and the
/// AGENT_EXEC_ROOT env var removed) and parse stdout as JSON.
fn run_cmd_with_subcommand_root_flag(
    subcommand: &str,
    root: &str,
    extra_args: &[&str],
) -> serde_json::Value {
    let mut cmd = Command::new(binary());
    cmd.arg(subcommand).arg("--root").arg(root).args(extra_args);
    cmd.env_remove("AGENT_EXEC_ROOT");
    let out = cmd.output().expect("run binary");
    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let body = stdout.trim();
    assert!(
        !body.is_empty(),
        "stdout is empty (stderr: {stderr})\nsubcommand: {subcommand}, root: {root}, extra: {extra_args:?}"
    );
    match serde_json::from_str(body) {
        Ok(v) => v,
        Err(e) => panic!(
            "stdout is not valid JSON: {e}\nstdout: {stdout}\nstderr: {stderr}\nsubcommand: {subcommand}"
        ),
    }
}
/// Assert the common response envelope: schema_version "0.1", the expected
/// `ok` flag, and the expected `type`. The `unwrap_or(!expected_ok)` default
/// makes a missing or non-boolean `ok` field fail the comparison.
fn assert_envelope(v: &serde_json::Value, expected_type: &str, expected_ok: bool) {
    let version = v["schema_version"].as_str().unwrap_or("");
    assert_eq!(version, "0.1", "schema_version mismatch: {v}");
    let ok = v["ok"].as_bool().unwrap_or(!expected_ok);
    assert_eq!(ok, expected_ok, "ok mismatch: {v}");
    let ty = v["type"].as_str().unwrap_or("");
    assert_eq!(ty, expected_type, "type mismatch: {v}");
}
// `run` with no snapshot wait must return immediately with a run envelope,
// a non-empty job_id, and state "running".
#[test]
fn run_returns_json_with_job_id() {
    let h = TestHarness::new();
    let v = h.run(&["run", "--snapshot-after", "0", "echo", "hello"]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id missing");
    assert!(!job_id.is_empty(), "job_id is empty");
    assert_eq!(v["state"].as_str().unwrap_or(""), "running");
}
// A positive --snapshot-after must make the run response carry a snapshot
// with utf-8-lossy encoding and both stream tails.
#[test]
fn run_with_snapshot_after_includes_snapshot() {
    let h = TestHarness::new();
    let v = h.run(&["run", "--snapshot-after", "500", "echo", "snapshot_test"]);
    assert_envelope(&v, "run", true);
    assert!(v.get("snapshot").is_some(), "snapshot field missing: {v}");
    let snapshot = &v["snapshot"];
    assert_eq!(snapshot["encoding"].as_str().unwrap_or(""), "utf-8-lossy");
    assert!(
        snapshot.get("stdout_tail").is_some(),
        "snapshot.stdout_tail missing: {snapshot}"
    );
    assert!(
        snapshot.get("stderr_tail").is_some(),
        "snapshot.stderr_tail missing: {snapshot}"
    );
}
// `status <job_id>` for a freshly started job echoes the job id and includes
// state and started_at fields.
#[test]
fn status_returns_json_for_existing_job() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "echo", "hi"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    let v = h.run(&["status", &job_id]);
    assert_envelope(&v, "status", true);
    assert_eq!(v["job_id"].as_str().unwrap_or(""), job_id);
    assert!(v.get("state").is_some(), "state missing");
    assert!(v.get("started_at").is_some(), "started_at missing");
}
// `status` on an unknown job id returns ok=false with error.code=job_not_found.
#[test]
fn status_error_for_unknown_job() {
    let h = TestHarness::new();
    let v = h.run(&["status", "NONEXISTENT_JOB_ID_XYZ"]);
    assert!(
        !v["ok"].as_bool().unwrap_or(true),
        "expected ok=false for unknown job: {v}"
    );
    assert_eq!(v["type"].as_str().unwrap_or(""), "error");
    assert_eq!(
        v["error"]["code"].as_str().unwrap_or(""),
        "job_not_found",
        "expected error.code=job_not_found: {v}"
    );
}
#[test]
fn tail_error_for_unknown_job() {
    // `tail` on a job id that was never created yields a job_not_found error.
    let harness = TestHarness::new();
    let resp = harness.run(&["tail", "NONEXISTENT_JOB_ID_XYZ"]);
    let ok = resp["ok"].as_bool().unwrap_or(true);
    assert!(!ok);
    assert_eq!(resp["type"].as_str().unwrap_or(""), "error");
    assert_eq!(resp["error"]["code"].as_str().unwrap_or(""), "job_not_found");
}
#[test]
fn kill_error_for_unknown_job() {
    // `kill` on a job id that was never created yields a job_not_found error.
    let harness = TestHarness::new();
    let resp = harness.run(&["kill", "NONEXISTENT_JOB_ID_XYZ"]);
    let ok = resp["ok"].as_bool().unwrap_or(true);
    assert!(!ok);
    assert_eq!(resp["type"].as_str().unwrap_or(""), "error");
    assert_eq!(resp["error"]["code"].as_str().unwrap_or(""), "job_not_found");
}
#[test]
fn wait_error_for_unknown_job() {
    // `wait` on a job id that was never created yields a job_not_found error.
    let harness = TestHarness::new();
    let resp = harness.run(&["wait", "NONEXISTENT_JOB_ID_XYZ"]);
    let ok = resp["ok"].as_bool().unwrap_or(true);
    assert!(!ok);
    assert_eq!(resp["type"].as_str().unwrap_or(""), "error");
    assert_eq!(resp["error"]["code"].as_str().unwrap_or(""), "job_not_found");
}
// `tail` on a live job reports the job id, utf-8-lossy encoding, both
// stream tails, and the truncated flag.
#[test]
fn tail_returns_json_with_encoding() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "300", "echo", "tail_test"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    let v = h.run(&["tail", &job_id]);
    assert_envelope(&v, "tail", true);
    assert_eq!(v["job_id"].as_str().unwrap_or(""), job_id);
    assert_eq!(v["encoding"].as_str().unwrap_or(""), "utf-8-lossy");
    assert!(v.get("stdout_tail").is_some(), "stdout_tail missing");
    assert!(v.get("stderr_tail").is_some(), "stderr_tail missing");
    assert!(v.get("truncated").is_some(), "truncated missing");
}
// `wait --timeout-ms` on a short job returns a wait envelope echoing the
// job id and containing a state field.
#[test]
fn wait_returns_json_after_job_finishes() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "echo", "done"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    let v = h.run(&["wait", "--timeout-ms", "5000", &job_id]);
    assert_envelope(&v, "wait", true);
    assert_eq!(v["job_id"].as_str().unwrap_or(""), job_id);
    assert!(v.get("state").is_some(), "state missing");
}
// With no flags, `wait` uses its default ~30s deadline: for a 60s job it
// must block roughly 30s, then report a non-terminal state and no exit_code.
// Deliberately timing-sensitive: this test takes about 30 seconds.
#[test]
fn wait_default_until_returns_non_terminal_for_long_running_job() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "sleep", "60"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    let started = std::time::Instant::now();
    let v = h.run(&["wait", &job_id]);
    let elapsed_ms = started.elapsed().as_millis() as u64;
    assert_envelope(&v, "wait", true);
    assert!(
        elapsed_ms >= 29_000,
        "default wait should be ~30s; got {elapsed_ms}ms"
    );
    let state = v["state"].as_str().unwrap_or("");
    assert!(
        state == "running" || state == "created",
        "wait default deadline should return non-terminal state; got: {state}"
    );
    assert!(
        v.get("exit_code").is_none() || v["exit_code"].is_null(),
        "exit_code should be absent/null for non-terminal timeout: {v}"
    );
    // Best-effort cleanup so the sleeping child does not outlive the test.
    let _ = h.run(&["kill", "--signal", "KILL", &job_id]);
}
// `wait --forever` blocks until the job reaches a terminal state.
#[test]
fn wait_forever_waits_until_terminal() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "sh", "-c", "sleep 0.1"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    let v = h.run(&["wait", "--forever", &job_id]);
    assert_envelope(&v, "wait", true);
    let state = v["state"].as_str().unwrap_or("");
    assert!(state == "exited" || state == "killed" || state == "failed");
}
#[test]
fn wait_rejects_until_and_forever_together() {
    // Passing both --until and --forever is a clap usage error (exit code 2).
    let harness = TestHarness::new();
    let args = ["wait", "--until", "100", "--forever", "JOBID"];
    assert_usage_error(&args, Some(harness.root()));
}
// `kill --signal KILL` on a running job returns a kill envelope echoing the
// job id and reporting which signal was sent.
#[test]
fn kill_returns_json() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "sleep", "60"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    // Give the supervisor a moment to actually spawn the child process.
    std::thread::sleep(std::time::Duration::from_millis(200));
    let v = h.run(&["kill", "--signal", "KILL", &job_id]);
    assert_envelope(&v, "kill", true);
    assert_eq!(v["job_id"].as_str().unwrap_or(""), job_id);
    assert!(v.get("signal").is_some(), "signal missing");
}
// --signal must accept arbitrary signal names (e.g. QUIT), not a fixed clap
// value list: exit code 2 would mean clap rejected the value; exit code 0
// would mean the unknown job was somehow found.
#[test]
fn kill_signal_non_listed_value_accepted_by_clap() {
    let bin = binary();
    let output = std::process::Command::new(&bin)
        .args(["kill", "--signal", "QUIT", "NONEXISTENT_JOB_ID_XYZ"])
        .output()
        .expect("run binary");
    let code = output.status.code().unwrap_or(-1);
    assert_ne!(
        code, 2,
        "exit code 2 means clap rejected 'QUIT' as a usage error; it should be accepted"
    );
    assert_ne!(code, 0, "expected non-zero exit code for unknown job id");
}
// Running a job creates <root>/<job_id>/full.log.
#[test]
fn run_creates_full_log() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "400", "echo", "full_log_test"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    let full_log = std::path::Path::new(h.root())
        .join(&job_id)
        .join("full.log");
    assert!(
        full_log.exists(),
        "full.log not found at {}",
        full_log.display()
    );
}
// All three log files must exist as soon as `run` returns, even with no
// snapshot wait at all (--snapshot-after 0).
#[test]
fn run_creates_all_log_files_immediately() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "echo", "log_files_test"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    let job_path = std::path::Path::new(h.root()).join(&job_id);
    for log_file in &["stdout.log", "stderr.log", "full.log"] {
        let p = job_path.join(log_file);
        assert!(
            p.exists(),
            "{log_file} not found at {} immediately after run",
            p.display()
        );
    }
}
// state.json must always contain the full schema: job{id,status,started_at},
// result{exit_code,signal,duration_ms} and updated_at. Optional result fields
// must be serialized as explicit nulls (not omitted) while the job runs, and
// must have the right JSON type once populated.
#[test]
fn state_json_required_fields_present_with_null_for_options() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "echo", "state_test"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    let state_path = std::path::Path::new(h.root())
        .join(&job_id)
        .join("state.json");
    assert!(state_path.exists(), "state.json not found");
    let raw = std::fs::read_to_string(&state_path).unwrap();
    let state: serde_json::Value = serde_json::from_str(&raw).unwrap();
    let job = state.get("job").expect("job block missing from state.json");
    assert!(job.get("id").is_some(), "job.id missing from state.json");
    assert!(
        job.get("status").is_some(),
        "job.status missing from state.json"
    );
    assert!(
        job.get("started_at").is_some(),
        "job.started_at missing from state.json"
    );
    let result = state
        .get("result")
        .expect("result block missing from state.json");
    assert!(
        result.get("exit_code").is_some(),
        "result.exit_code missing from state.json (must be null)"
    );
    assert!(
        result.get("signal").is_some(),
        "result.signal missing from state.json (must be null)"
    );
    assert!(
        result.get("duration_ms").is_some(),
        "result.duration_ms missing from state.json (must be null)"
    );
    assert!(
        state.get("updated_at").is_some(),
        "updated_at missing from state.json"
    );
    // Type checks: each optional field is either null (job still running)
    // or carries its final typed value.
    let exit_code = &result["exit_code"];
    let signal = &result["signal"];
    let duration_ms = &result["duration_ms"];
    assert!(
        exit_code.is_null() || exit_code.is_number(),
        "result.exit_code must be null or number, got {exit_code}"
    );
    assert!(
        signal.is_null() || signal.is_string(),
        "result.signal must be null or string, got {signal}"
    );
    assert!(
        duration_ms.is_null() || duration_ms.is_number(),
        "result.duration_ms must be null or number, got {duration_ms}"
    );
}
// Pin the crate-wide schema version constant to "0.1".
#[test]
fn all_commands_use_schema_version_0_1() {
    assert_eq!(agent_exec::schema::SCHEMA_VERSION, "0.1");
}
// Error envelopes must carry code, message, and the spec-required retryable
// flag; job_not_found is specifically non-retryable.
#[test]
fn error_response_has_retryable_field() {
    let h = TestHarness::new();
    let v = h.run(&["status", "NONEXISTENT_JOB_CONTRACT_TEST"]);
    let error = v.get("error").expect("error object missing");
    assert!(error.get("code").is_some(), "error.code missing: {error}");
    assert!(
        error.get("message").is_some(),
        "error.message missing: {error}"
    );
    assert!(
        error.get("retryable").is_some(),
        "error.retryable missing (required by spec): {error}"
    );
    assert!(
        !error["retryable"].as_bool().unwrap_or(true),
        "job_not_found should have retryable=false: {error}"
    );
}
// Exit-code contract: a well-formed command that fails (unknown job) exits
// with 1, distinguishing runtime errors from usage errors (2).
#[test]
fn status_unknown_job_exits_with_code_1() {
    let h = TestHarness::new();
    let bin = binary();
    let output = std::process::Command::new(&bin)
        .env("AGENT_EXEC_ROOT", h.root())
        .args(["status", "NONEXISTENT_EXIT_CODE_TEST"])
        .output()
        .expect("run binary");
    assert_eq!(
        output.status.code(),
        Some(1),
        "expected exit code 1 for unknown job"
    );
}
// Exit-code contract: an unrecognized subcommand is a usage error (exit 2).
#[test]
fn invalid_subcommand_exits_with_code_2() {
    let bin = binary();
    let output = std::process::Command::new(&bin)
        .args(["__no_such_subcommand__"])
        .output()
        .expect("run binary");
    assert_eq!(
        output.status.code(),
        Some(2),
        "expected exit code 2 for invalid subcommand"
    );
}
// The `--` separator before the command words must parse the same as the
// bare form and still start a job.
#[test]
fn run_with_double_dash_separator() {
    let h = TestHarness::new();
    let v = h.run(&["run", "--snapshot-after", "0", "--", "echo", "hello_dash"]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id missing");
    assert!(!job_id.is_empty(), "job_id is empty");
}
// Output contract: stdout is exactly one line containing one JSON object —
// no banners, no multi-line pretty-printing, no trailing noise.
#[test]
fn stdout_is_single_json_object() {
    let h = TestHarness::new();
    let bin = binary();
    let output = std::process::Command::new(&bin)
        .env("AGENT_EXEC_ROOT", h.root())
        .args(["status", "NONEXISTENT_STDOUT_JSON_TEST"])
        .output()
        .expect("run binary");
    let stdout = String::from_utf8_lossy(&output.stdout);
    let lines: Vec<&str> = stdout.trim().lines().collect();
    assert_eq!(
        lines.len(),
        1,
        "stdout should contain exactly 1 line (JSON), got {}: {:?}",
        lines.len(),
        lines
    );
    let parsed: serde_json::Value =
        serde_json::from_str(lines[0]).expect("stdout line is not valid JSON");
    assert!(parsed.is_object(), "stdout JSON is not an object: {parsed}");
}
// Output contract: stderr is for human-readable logs only; no line may look
// like a JSON envelope (checked heuristically by a leading '{').
#[test]
fn stderr_contains_no_json_envelope() {
    let h = TestHarness::new();
    let bin = binary();
    let output = std::process::Command::new(&bin)
        .env("AGENT_EXEC_ROOT", h.root())
        .env("RUST_LOG", "info")
        .args(["status", "NONEXISTENT_STDERR_TEST"])
        .output()
        .expect("run binary");
    let stderr = String::from_utf8_lossy(&output.stderr);
    for line in stderr.lines() {
        let trimmed = line.trim();
        if !trimmed.is_empty() {
            assert!(
                !trimmed.starts_with('{'),
                "stderr contains JSON-like output (should be logs only): {trimmed}"
            );
        }
    }
}
// Every line written to full.log must be tagged with its source stream.
// NOTE(review): despite the test name, timestamps are not asserted here —
// only the [STDOUT]/[STDERR] tags; extend once the timestamp format is fixed.
#[test]
fn full_log_has_timestamp_and_stream_tags() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "500",
        "echo",
        "full_log_format_test",
    ]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    let full_log = std::path::Path::new(h.root())
        .join(&job_id)
        .join("full.log");
    std::thread::sleep(std::time::Duration::from_millis(200));
    // full.log is created when the job starts (run_creates_full_log asserts
    // this under identical conditions), so a missing file is a real failure;
    // the previous `if full_log.exists()` guard silently turned this whole
    // test into a no-op when the file was absent.
    assert!(
        full_log.exists(),
        "full.log not found at {}",
        full_log.display()
    );
    let contents = std::fs::read_to_string(&full_log).unwrap_or_default();
    // An empty log is tolerated (output may not be flushed yet), but every
    // line that IS present must carry a stream tag.
    for line in contents.lines() {
        assert!(
            line.contains("[STDOUT]") || line.contains("[STDERR]"),
            "full.log line missing [STDOUT]/[STDERR] tag: {line}"
        );
    }
}
// --log <path> redirects the combined log to a caller-chosen file.
#[test]
fn run_log_path_override() {
    let h = TestHarness::new();
    let log_path = std::path::Path::new(h.root()).join("custom_full.log");
    let log_path_str = log_path.to_str().unwrap();
    h.run(&[
        "run",
        "--snapshot-after",
        "500",
        "--log",
        log_path_str,
        "echo",
        "log_override_test",
    ]);
    // Short grace period for the supervisor to create the file.
    std::thread::sleep(std::time::Duration::from_millis(300));
    assert!(
        log_path.exists(),
        "custom log file not found at {}",
        log_path.display()
    );
}
// --env KEY=VALUE must be visible in the child's environment; the child
// echoes the variable and we look for the value in stdout.log.
#[test]
fn run_env_var_is_applied() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "500",
        "--env",
        "TEST_KEY_AGENT_EXEC=hello_from_env",
        "--",
        "sh",
        "-c",
        "echo $TEST_KEY_AGENT_EXEC",
    ]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    std::thread::sleep(std::time::Duration::from_millis(500));
    let stdout_log = std::path::Path::new(h.root())
        .join(&job_id)
        .join("stdout.log");
    // stdout.log is created as soon as the job starts (see
    // run_creates_all_log_files_immediately), so a missing file is a real
    // failure; the previous `if stdout_log.exists()` guard let the test pass
    // vacuously without checking anything.
    assert!(
        stdout_log.exists(),
        "stdout.log not found at {}",
        stdout_log.display()
    );
    let contents = std::fs::read_to_string(&stdout_log).unwrap_or_default();
    assert!(
        contents.contains("hello_from_env"),
        "env var not applied; stdout.log: {contents}"
    );
}
// With --no-inherit-env the child must not see inherited variables such as
// $HOME, so `echo INHERITED=$HOME` must print exactly "INHERITED=".
#[test]
fn run_no_inherit_env_clears_env() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "500",
        "--no-inherit-env",
        "--",
        "/bin/sh",
        "-c",
        "echo INHERITED=$HOME",
    ]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    std::thread::sleep(std::time::Duration::from_millis(500));
    let stdout_log = std::path::Path::new(h.root())
        .join(&job_id)
        .join("stdout.log");
    // stdout.log is created as soon as the job starts (see
    // run_creates_all_log_files_immediately); the previous `if exists` guard
    // let the test pass vacuously when the file was missing.
    assert!(
        stdout_log.exists(),
        "stdout.log not found at {}",
        stdout_log.display()
    );
    let contents = std::fs::read_to_string(&stdout_log).unwrap_or_default();
    // BUGFIX: the old check `contains("INHERITED=\n") || contains("INHERITED=")`
    // could never fail — the second disjunct also matched "INHERITED=/home/user".
    // Require the value after '=' to actually be empty.
    assert!(
        contents
            .lines()
            .any(|line| line.trim_end() == "INHERITED="),
        "expected HOME to be empty with --no-inherit-env; stdout.log: {contents}"
    );
}
// --timeout/--kill-after must terminate a long-running child: poll status
// until the job leaves "running", failing if that takes more than 10s.
#[test]
fn run_timeout_terminates_child() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--timeout",
        "1000",
        "--kill-after",
        "1000",
        "sleep",
        "60",
    ]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    let deadline = std::time::Instant::now() + std::time::Duration::from_secs(10);
    loop {
        std::thread::sleep(std::time::Duration::from_millis(200));
        let v = h.run(&["status", &job_id]);
        let state = v["state"].as_str().unwrap_or("running");
        if state != "running" {
            break;
        }
        assert!(
            std::time::Instant::now() < deadline,
            "job should have been terminated by timeout; state={state}"
        );
    }
}
// --progress-every must keep state.json fresh while the job runs: after a
// short sleep the file must parse and contain updated_at.
#[test]
fn run_progress_every_updates_state() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--progress-every",
        "200",
        "sleep",
        "5",
    ]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    std::thread::sleep(std::time::Duration::from_millis(500));
    let state_path = std::path::Path::new(h.root())
        .join(&job_id)
        .join("state.json");
    let contents = std::fs::read_to_string(&state_path).unwrap_or_default();
    let state: serde_json::Value =
        serde_json::from_str(&contents).expect("state.json is not valid JSON");
    assert!(
        state.get("updated_at").is_some(),
        "updated_at missing from state.json: {contents}"
    );
    // Clean up the sleeping child.
    h.run(&["kill", "--signal", "KILL", &job_id]);
}
// The --progress-every supervisor must notice child exit and record a
// terminal state rather than reporting "running" forever.
#[test]
fn progress_every_supervise_stops_after_child_exits() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--progress-every",
        "100",
        "--",
        "echo",
        "done",
    ]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    // Generous grace period: echo exits almost immediately; 1.5s covers
    // several 100ms progress ticks.
    std::thread::sleep(std::time::Duration::from_millis(1500));
    let v = h.run(&["status", &job_id]);
    let state = v["state"].as_str().unwrap_or("running");
    assert_ne!(
        state, "running",
        "job should not be running after child exits with --progress-every; state={state}, response={v}"
    );
}
// Supplying both --inherit-env and --no-inherit-env is a clap usage error
// (exit code 2).
#[test]
fn inherit_env_and_no_inherit_env_are_mutually_exclusive() {
    let h = TestHarness::new();
    let bin = binary();
    let output = std::process::Command::new(&bin)
        .env("AGENT_EXEC_ROOT", h.root())
        .args([
            "run",
            "--inherit-env",
            "--no-inherit-env",
            "--",
            "echo",
            "test",
        ])
        .output()
        .expect("run binary");
    assert_eq!(
        output.status.code(),
        Some(2),
        "expected exit code 2 when both --inherit-env and --no-inherit-env are supplied"
    );
}
// --mask must redact the named env var in persisted metadata: meta.json
// records "SECRET_TOKEN=***" and never the real value.
#[test]
fn mask_replaces_env_var_value_with_stars() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--env",
        "SECRET_TOKEN=super_secret_value",
        "--mask",
        "SECRET_TOKEN",
        "--snapshot-after",
        "300",
        "--",
        "echo",
        "done",
    ]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    std::thread::sleep(std::time::Duration::from_millis(300));
    let meta_path = std::path::Path::new(h.root())
        .join(&job_id)
        .join("meta.json");
    assert!(meta_path.exists(), "meta.json not found");
    let meta_contents = std::fs::read_to_string(&meta_path).unwrap();
    let meta: serde_json::Value =
        serde_json::from_str(&meta_contents).expect("meta.json invalid JSON");
    let env_vars = meta["env_vars"]
        .as_array()
        .expect("env_vars missing in meta.json");
    let has_masked = env_vars
        .iter()
        .any(|v| v.as_str() == Some("SECRET_TOKEN=***"));
    assert!(
        has_masked,
        "expected SECRET_TOKEN=*** in meta.json env_vars, got: {meta_contents}"
    );
    // Checking the raw file text catches leaks anywhere in the document,
    // not just inside env_vars.
    assert!(
        !meta_contents.contains("super_secret_value"),
        "real secret value should not appear in meta.json: {meta_contents}"
    );
}
// The run response itself must also redact masked env vars: env_vars lists
// "SECRET=***" and the raw stdout never contains the real value.
#[test]
fn run_json_response_includes_masked_env_vars() {
    let h = TestHarness::new();
    let bin = binary();
    let output = std::process::Command::new(&bin)
        .env("AGENT_EXEC_ROOT", h.root())
        .args([
            "run",
            "--snapshot-after",
            "0",
            "--env",
            "SECRET=super_secret_run_value",
            "--mask",
            "SECRET",
            "--",
            "echo",
            "done",
        ])
        .output()
        .expect("run binary");
    let stdout = String::from_utf8_lossy(&output.stdout);
    let v: serde_json::Value = serde_json::from_str(stdout.trim())
        .unwrap_or_else(|e| panic!("stdout is not valid JSON: {e}\nstdout: {stdout}"));
    assert_envelope(&v, "run", true);
    let env_vars = v["env_vars"]
        .as_array()
        .expect("env_vars missing in run JSON response");
    let has_masked = env_vars.iter().any(|v| v.as_str() == Some("SECRET=***"));
    assert!(
        has_masked,
        "expected SECRET=*** in run JSON env_vars, got: {v}"
    );
    // Raw-text check catches leaks anywhere in the response.
    assert!(
        !stdout.contains("super_secret_run_value"),
        "real secret value should not appear in run JSON stdout: {stdout}"
    );
}
// When the output has more lines than --tail-lines, the tail response must
// set truncated=true.
#[test]
fn tail_truncated_when_over_limit() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "500",
        "--",
        "sh",
        "-c",
        "printf 'line1\\nline2\\nline3\\nline4\\nline5\\n'",
    ]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    std::thread::sleep(std::time::Duration::from_millis(300));
    let v = h.run(&["tail", "--tail-lines", "2", &job_id]);
    assert_envelope(&v, "tail", true);
    assert!(
        v["truncated"].as_bool().unwrap_or(false),
        "expected truncated=true; response: {v}"
    );
}
// The run response must include timing metrics (elapsed_ms >= waited_ms),
// absolute per-stream log paths, and snapshot byte counters with
// included <= observed.
#[test]
fn run_includes_waited_ms_elapsed_ms_and_log_paths() {
    let h = TestHarness::new();
    let v = h.run(&["run", "--snapshot-after", "300", "echo", "metrics_test"]);
    assert_envelope(&v, "run", true);
    let waited_ms = v["waited_ms"]
        .as_u64()
        .expect("waited_ms missing from run response");
    let elapsed_ms = v["elapsed_ms"]
        .as_u64()
        .expect("elapsed_ms missing from run response");
    assert!(
        elapsed_ms >= waited_ms,
        "elapsed_ms ({elapsed_ms}) must be >= waited_ms ({waited_ms})"
    );
    let stdout_path = v["stdout_log_path"]
        .as_str()
        .expect("stdout_log_path missing from run response");
    let stderr_path = v["stderr_log_path"]
        .as_str()
        .expect("stderr_log_path missing from run response");
    assert!(!stdout_path.is_empty(), "stdout_log_path is empty");
    assert!(!stderr_path.is_empty(), "stderr_log_path is empty");
    assert!(
        std::path::Path::new(stdout_path).is_absolute(),
        "stdout_log_path must be absolute: {stdout_path}"
    );
    assert!(
        std::path::Path::new(stderr_path).is_absolute(),
        "stderr_log_path must be absolute: {stderr_path}"
    );
    let snapshot = v
        .get("snapshot")
        .expect("snapshot missing from run response");
    assert!(
        snapshot.get("stdout_observed_bytes").is_some(),
        "snapshot.stdout_observed_bytes missing: {snapshot}"
    );
    assert!(
        snapshot.get("stderr_observed_bytes").is_some(),
        "snapshot.stderr_observed_bytes missing: {snapshot}"
    );
    assert!(
        snapshot.get("stdout_included_bytes").is_some(),
        "snapshot.stdout_included_bytes missing: {snapshot}"
    );
    assert!(
        snapshot.get("stderr_included_bytes").is_some(),
        "snapshot.stderr_included_bytes missing: {snapshot}"
    );
    // The tail can only ever include a subset of what was observed.
    let stdout_observed = snapshot["stdout_observed_bytes"].as_u64().unwrap_or(0);
    let stdout_included = snapshot["stdout_included_bytes"].as_u64().unwrap_or(0);
    assert!(
        stdout_included <= stdout_observed,
        "stdout_included_bytes ({stdout_included}) must be <= stdout_observed_bytes ({stdout_observed})"
    );
}
// --snapshot-after 0 means no wait: waited_ms must be exactly 0, elapsed_ms
// small, and the snapshot absent (or null).
#[test]
fn run_without_snapshot_after_has_waited_ms_zero() {
    let h = TestHarness::new();
    let v = h.run(&["run", "--snapshot-after", "0", "echo", "no_snapshot"]);
    assert_envelope(&v, "run", true);
    let waited_ms = v["waited_ms"].as_u64().expect("waited_ms missing");
    assert_eq!(waited_ms, 0, "waited_ms must be 0 when snapshot-after=0");
    let elapsed_ms = v["elapsed_ms"].as_u64().expect("elapsed_ms missing");
    assert!(
        elapsed_ms < 5000,
        "elapsed_ms should be small without wait: {elapsed_ms}"
    );
    assert!(
        v.get("snapshot").is_none() || v["snapshot"].is_null(),
        "snapshot should be absent when snapshot-after=0: {v}"
    );
}
// The tail response must also carry absolute per-stream log paths and byte
// counters with included <= observed.
#[test]
fn tail_includes_log_paths_and_bytes_metrics() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "400", "echo", "tail_bytes_test"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    std::thread::sleep(std::time::Duration::from_millis(200));
    let v = h.run(&["tail", &job_id]);
    assert_envelope(&v, "tail", true);
    let stdout_path = v["stdout_log_path"]
        .as_str()
        .expect("stdout_log_path missing from tail response");
    let stderr_path = v["stderr_log_path"]
        .as_str()
        .expect("stderr_log_path missing from tail response");
    assert!(!stdout_path.is_empty(), "stdout_log_path is empty");
    assert!(!stderr_path.is_empty(), "stderr_log_path is empty");
    assert!(
        std::path::Path::new(stdout_path).is_absolute(),
        "stdout_log_path must be absolute: {stdout_path}"
    );
    assert!(
        std::path::Path::new(stderr_path).is_absolute(),
        "stderr_log_path must be absolute: {stderr_path}"
    );
    assert!(
        v.get("stdout_observed_bytes").is_some(),
        "stdout_observed_bytes missing from tail response: {v}"
    );
    assert!(
        v.get("stderr_observed_bytes").is_some(),
        "stderr_observed_bytes missing from tail response: {v}"
    );
    assert!(
        v.get("stdout_included_bytes").is_some(),
        "stdout_included_bytes missing from tail response: {v}"
    );
    assert!(
        v.get("stderr_included_bytes").is_some(),
        "stderr_included_bytes missing from tail response: {v}"
    );
    let stdout_observed = v["stdout_observed_bytes"].as_u64().unwrap_or(0);
    let stdout_included = v["stdout_included_bytes"].as_u64().unwrap_or(0);
    assert!(
        stdout_included <= stdout_observed,
        "stdout_included_bytes ({stdout_included}) must be <= stdout_observed_bytes ({stdout_observed})"
    );
}
// `list` on a root directory that does not exist must succeed with an empty
// job list and truncated=false, not error out.
#[test]
fn list_returns_empty_when_root_does_not_exist() {
    let h = TestHarness::new();
    let nonexistent = std::path::Path::new(h.root()).join("does_not_exist");
    let nonexistent_str = nonexistent.to_str().unwrap();
    let v = run_cmd_with_root(&["list"], Some(nonexistent_str));
    assert_envelope(&v, "list", true);
    let jobs = v["jobs"].as_array().expect("jobs missing");
    assert!(jobs.is_empty(), "expected empty jobs list; got: {v}");
    assert!(
        !v["truncated"].as_bool().unwrap_or(true),
        "truncated must be false for empty list"
    );
}
// `list` sorts jobs newest-first by started_at, and every summary contains
// job_id, state, and started_at.
#[test]
fn list_returns_jobs_sorted_by_started_at_desc() {
    let h = TestHarness::new();
    let _r1 = h.run(&["run", "--snapshot-after", "0", "echo", "job1"]);
    // Small delay so the two jobs get distinguishable start timestamps.
    std::thread::sleep(std::time::Duration::from_millis(10));
    let r2 = h.run(&["run", "--snapshot-after", "0", "echo", "job2"]);
    let job2_id = r2["job_id"].as_str().unwrap().to_string();
    let v = h.run(&["list"]);
    assert_envelope(&v, "list", true);
    let jobs = v["jobs"].as_array().expect("jobs missing");
    assert!(jobs.len() >= 2, "expected at least 2 jobs; got: {v}");
    let first_id = jobs[0]["job_id"].as_str().unwrap_or("");
    assert_eq!(
        first_id, job2_id,
        "expected most recent job first; got: {v}"
    );
    for job in jobs {
        assert!(job.get("job_id").is_some(), "job_id missing in job summary");
        assert!(job.get("state").is_some(), "state missing in job summary");
        assert!(
            job.get("started_at").is_some(),
            "started_at missing in job summary"
        );
    }
}
// `list --limit N` returns at most N jobs and flags truncation.
#[test]
fn list_limit_truncates_result() {
    let h = TestHarness::new();
    h.run(&["run", "--snapshot-after", "0", "echo", "j1"]);
    std::thread::sleep(std::time::Duration::from_millis(10));
    h.run(&["run", "--snapshot-after", "0", "echo", "j2"]);
    std::thread::sleep(std::time::Duration::from_millis(10));
    h.run(&["run", "--snapshot-after", "0", "echo", "j3"]);
    let v = h.run(&["list", "--limit", "2"]);
    assert_envelope(&v, "list", true);
    let jobs = v["jobs"].as_array().expect("jobs missing");
    assert_eq!(jobs.len(), 2, "expected 2 jobs due to --limit 2; got: {v}");
    assert!(
        v["truncated"].as_bool().unwrap_or(false),
        "truncated must be true when result is truncated; got: {v}"
    );
}
#[test]
fn list_response_contains_root_field() {
    // The list envelope reports which root directory was scanned.
    let harness = TestHarness::new();
    let resp = harness.run(&["list"]);
    assert_envelope(&resp, "list", true);
    let root_field = resp["root"].as_str().expect("root missing in list response");
    assert!(!root_field.is_empty(), "root field is empty");
}
// `list --state running` includes only running jobs: a 60s sleeper appears,
// an already-exited job does not, and every returned state is "running".
#[test]
fn list_filters_by_state_running() {
    let h = TestHarness::new();
    let long_run = h.run(&["run", "--snapshot-after", "0", "sleep", "60"]);
    let long_job_id = long_run["job_id"]
        .as_str()
        .expect("job_id missing")
        .to_string();
    let short_run = h.run(&["run", "--snapshot-after", "500", "echo", "done"]);
    let short_job_id = short_run["job_id"]
        .as_str()
        .expect("job_id missing")
        .to_string();
    // Make sure the short job has reached a terminal state before listing.
    h.run(&["wait", "--timeout-ms", "5000", &short_job_id]);
    let v = h.run(&["list", "--state", "running"]);
    assert_envelope(&v, "list", true);
    let jobs = v["jobs"].as_array().expect("jobs missing");
    let has_long = jobs
        .iter()
        .any(|j| j["job_id"].as_str() == Some(&long_job_id));
    let has_short = jobs
        .iter()
        .any(|j| j["job_id"].as_str() == Some(&short_job_id));
    assert!(
        has_long,
        "long-running job should appear in --state running; got: {v}"
    );
    assert!(
        !has_short,
        "exited job should NOT appear in --state running; got: {v}"
    );
    for job in jobs {
        let state = job["state"].as_str().unwrap_or("");
        assert_eq!(
            state, "running",
            "unexpected state in --state running result: {state}; job: {job}"
        );
    }
    // Clean up the sleeping child.
    h.run(&["kill", &long_job_id]);
}
// `list` tolerates junk inside the root: a directory without job files is
// counted in `skipped` while valid jobs are still returned.
#[test]
fn list_skips_invalid_directories() {
    let h = TestHarness::new();
    let r = h.run(&["run", "--snapshot-after", "0", "echo", "valid"]);
    let valid_job_id = r["job_id"].as_str().unwrap().to_string();
    // Plant an empty directory that cannot be parsed as a job.
    let broken_dir = std::path::Path::new(h.root()).join("broken_job_dir");
    std::fs::create_dir_all(&broken_dir).unwrap();
    let v = h.run(&["list"]);
    assert_envelope(&v, "list", true);
    let jobs = v["jobs"].as_array().expect("jobs missing");
    let has_valid = jobs
        .iter()
        .any(|j| j["job_id"].as_str() == Some(&valid_job_id));
    assert!(has_valid, "valid job not found in list; got: {v}");
    let skipped = v["skipped"]
        .as_u64()
        .expect("skipped missing in list response");
    assert!(
        skipped >= 1,
        "expected skipped >= 1 for broken directory; got: {v}"
    );
}
// --snapshot-after values above 10s are clamped: a 20s request must not make
// the command wait longer than ~10s.
#[test]
fn run_snapshot_after_is_clamped_to_10_seconds() {
    let h = TestHarness::new();
    let v = h.run(&["run", "--snapshot-after", "20000", "echo", "clamp_test"]);
    assert_envelope(&v, "run", true);
    let waited_ms = v["waited_ms"].as_u64().expect("waited_ms missing");
    assert!(
        waited_ms <= 10_500,
        "waited_ms ({waited_ms}) must be <= 10,500 when snapshot-after is clamped to 10,000ms"
    );
    assert!(
        waited_ms < 15_000,
        "waited_ms ({waited_ms}) indicates snapshot-after was NOT clamped (expected < 15,000ms)"
    );
}
// Without an explicit --snapshot-after, `run` defaults to a 10s snapshot
// window: the response must carry a snapshot (with encoding and both tails),
// waited_ms must be in (0, 10_000], and the tail must contain the echoed text.
#[test]
fn run_default_includes_snapshot() {
    let h = TestHarness::new();
    let v = h.run(&["run", "echo", "default_snapshot_test"]);
    assert_envelope(&v, "run", true);
    assert!(
        v.get("snapshot").is_some() && !v["snapshot"].is_null(),
        "snapshot should be present in default run response: {v}"
    );
    let snapshot = &v["snapshot"];
    assert_eq!(
        snapshot["encoding"].as_str().unwrap_or(""),
        "utf-8-lossy",
        "snapshot encoding must be utf-8-lossy"
    );
    assert!(
        snapshot.get("stdout_tail").is_some(),
        "snapshot.stdout_tail must be present"
    );
    assert!(
        snapshot.get("stderr_tail").is_some(),
        "snapshot.stderr_tail must be present"
    );
    let waited_ms = v["waited_ms"].as_u64().expect("waited_ms missing");
    assert!(
        waited_ms > 0,
        "waited_ms must be > 0 with default snapshot_after=10000: {waited_ms}"
    );
    assert!(
        waited_ms <= 10_000,
        "waited_ms ({waited_ms}) must be <= 10,000ms with default snapshot_after=10000"
    );
    let stdout_tail = snapshot["stdout_tail"].as_str().unwrap_or("");
    assert!(
        stdout_tail.contains("default_snapshot_test"),
        "snapshot.stdout_tail should contain 'default_snapshot_test'; got: {stdout_tail:?}"
    );
}
// The snapshot must include output that was never newline-terminated
// (guards against line-buffered capture dropping a trailing partial line).
#[test]
fn run_snapshot_captures_output_without_newline() {
    let h = TestHarness::new();
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "400",
        "--max-bytes",
        "256",
        "sh",
        "-c",
        "printf 'no-newline-output'",
    ]);
    assert_envelope(&v, "run", true);
    let snapshot = v.get("snapshot").expect("snapshot must be present");
    let stdout_tail = snapshot["stdout_tail"].as_str().unwrap_or("");
    assert!(
        stdout_tail.contains("no-newline-output"),
        "snapshot.stdout_tail should contain 'no-newline-output' even without trailing newline; got: {stdout_tail:?}"
    );
}
// `--snapshot-after` is a fixed deadline: even when the job writes output
// immediately, the CLI must keep polling until the deadline before replying.
#[test]
fn snapshot_after_waits_until_deadline_despite_early_output() {
    let h = TestHarness::new();
    // Emits "hello" right away, then stays alive well past the 200ms deadline.
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "200",
        "sh",
        "-c",
        "printf 'hello\\n'; sleep 5",
    ]);
    assert_envelope(&v, "run", true);
    let waited_ms = v["waited_ms"]
        .as_u64()
        .expect("waited_ms missing from run response");
    assert!(
        waited_ms >= 200,
        "waited_ms ({waited_ms}) must be >= snapshot-after (200ms) even when output \
arrives early; early-output exit was not removed from the polling loop"
    );
    let snapshot = v.get("snapshot").expect("snapshot must be present");
    let stdout_tail = snapshot["stdout_tail"].as_str().unwrap_or("");
    assert!(
        stdout_tail.contains("hello"),
        "snapshot.stdout_tail should contain 'hello'; got: {stdout_tail:?}"
    );
}
/// With `--wait`, the response must describe a finished job: a terminal
/// state, a `finished_at` timestamp, and a complete `final_snapshot`.
#[test]
fn run_wait_returns_terminal_state() {
    let harness = TestHarness::new();
    let args = [
        "run",
        "--snapshot-after",
        "0",
        "--wait",
        "echo",
        "run_wait_test",
    ];
    let v = harness.run(&args);
    assert_envelope(&v, "run", true);
    let state = v["state"].as_str().unwrap_or("");
    assert!(
        matches!(state, "exited" | "killed" | "failed"),
        "state must be terminal when --wait is used; got: {state:?}"
    );
    let finished_at = v["finished_at"].as_str().unwrap_or("");
    assert!(
        !finished_at.is_empty(),
        "finished_at must be present in run --wait response; got: {v}"
    );
    let final_snapshot = v
        .get("final_snapshot")
        .expect("final_snapshot must be present in run --wait response");
    assert_eq!(
        final_snapshot["encoding"].as_str().unwrap_or(""),
        "utf-8-lossy",
        "final_snapshot.encoding must be 'utf-8-lossy'"
    );
    assert!(
        final_snapshot.get("stdout_tail").is_some(),
        "final_snapshot.stdout_tail must be present"
    );
    assert!(
        final_snapshot.get("stderr_tail").is_some(),
        "final_snapshot.stderr_tail must be present"
    );
    let stdout_tail = final_snapshot["stdout_tail"].as_str().unwrap_or("");
    assert!(
        stdout_tail.contains("run_wait_test"),
        "final_snapshot.stdout_tail should contain 'run_wait_test'; got: {stdout_tail:?}"
    );
}
/// `run --wait` must surface the child's exit code (here `exit 42`) alongside
/// `finished_at` and `final_snapshot`.
#[test]
fn run_wait_returns_exit_code() {
    let harness = TestHarness::new();
    let v = harness.run(&[
        "run", "--snapshot-after", "0", "--wait", "sh", "-c", "exit 42",
    ]);
    assert_envelope(&v, "run", true);
    let state = v["state"].as_str().unwrap_or("");
    assert!(
        matches!(state, "exited" | "killed" | "failed"),
        "state must be terminal; got: {state:?}"
    );
    assert!(
        v.get("exit_code").is_some(),
        "exit_code must be present in run --wait response; got: {v}"
    );
    // -999 sentinel guarantees a mismatch (and a useful message) if the field
    // is present but not an integer.
    let exit_code = v["exit_code"].as_i64().unwrap_or(-999);
    assert_eq!(exit_code, 42, "exit_code must be 42; got: {exit_code}");
    assert!(
        v["finished_at"].as_str().is_some_and(|s| !s.is_empty()),
        "finished_at must be present; got: {v}"
    );
    assert!(
        v.get("final_snapshot").is_some(),
        "final_snapshot must be present; got: {v}"
    );
}
/// `waited_ms` must reflect real wall-clock waiting when `--wait` blocks on a
/// job that sleeps ~100ms.
#[test]
fn run_wait_waited_ms_reflects_actual_wait_time() {
    let harness = TestHarness::new();
    let args = [
        "run", "--snapshot-after", "0", "--wait", "sh", "-c", "sleep 0.1",
    ];
    let v = harness.run(&args);
    assert_envelope(&v, "run", true);
    let waited_ms = v["waited_ms"]
        .as_u64()
        .expect("waited_ms missing from run --wait response");
    assert!(
        waited_ms > 0,
        "waited_ms must be > 0 when --wait is used (job takes ~100ms); got: {waited_ms}"
    );
}
/// `--wait` must bypass the snapshot_after delay entirely: a quick `echo`
/// comes back far sooner than the clamped 10s window despite a 20s request.
#[test]
fn run_wait_skips_snapshot_after_clamp() {
    let harness = TestHarness::new();
    let args = [
        "run",
        "--snapshot-after",
        "20000",
        "--wait",
        "echo",
        "skip_clamp_test",
    ];
    let v = harness.run(&args);
    assert_envelope(&v, "run", true);
    let waited_ms = v["waited_ms"].as_u64().expect("waited_ms missing");
    assert!(
        waited_ms < 5_000,
        "waited_ms ({waited_ms}) must be < 5,000ms: --wait should skip snapshot_after delay"
    );
    assert!(
        v.get("final_snapshot").is_some(),
        "final_snapshot must be present in run --wait response; got: {v}"
    );
}
// Without `--until`/`--forever`, `run --wait` uses a ~30s default deadline.
// A 60s job must therefore hit the deadline: ~30s elapsed, a non-terminal
// state, and no `final_snapshot` in the response.
#[test]
fn run_wait_default_until_returns_non_terminal_for_long_running_job() {
    let h = TestHarness::new();
    let started = std::time::Instant::now();
    let v = h.run(&["run", "--snapshot-after", "0", "--wait", "sleep", "60"]);
    let elapsed_ms = started.elapsed().as_millis() as u64;
    assert_envelope(&v, "run", true);
    // 29s lower bound leaves ~1s of slack around the 30s default deadline.
    assert!(
        elapsed_ms >= 29_000,
        "default run --wait should be ~30s; got {elapsed_ms}ms"
    );
    let state = v["state"].as_str().unwrap_or("");
    assert!(
        state == "running" || state == "created",
        "run --wait default deadline should return non-terminal state; got: {state}"
    );
    assert!(
        v.get("final_snapshot").is_none(),
        "final_snapshot should be absent on deadline timeout; got: {v}"
    );
}
/// `--until 100` must override the ~30s default `--wait` deadline: the CLI
/// returns quickly with a non-terminal state while the 60s job keeps running.
#[test]
fn run_wait_until_overrides_default_deadline() {
    let h = TestHarness::new();
    let started = std::time::Instant::now();
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--wait",
        "--until",
        "100",
        "sleep",
        "60",
    ]);
    let elapsed_ms = started.elapsed().as_millis() as u64;
    assert_envelope(&v, "run", true);
    assert!(
        elapsed_ms < 5_000,
        "run --wait --until 100 should return quickly"
    );
    let state = v["state"].as_str().unwrap_or("");
    // Diagnostic message added for consistency with the sibling wait tests,
    // which all report the offending state on failure.
    assert!(
        state == "running" || state == "created",
        "run --wait --until 100 should return non-terminal state; got: {state}"
    );
}
/// `--forever` must block until the job reaches a terminal state, however
/// short the job is, and the response must include a final snapshot.
#[test]
fn run_wait_forever_waits_until_terminal() {
    let h = TestHarness::new();
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--wait",
        "--forever",
        "sh",
        "-c",
        "sleep 0.1",
    ]);
    assert_envelope(&v, "run", true);
    let state = v["state"].as_str().unwrap_or("");
    // Diagnostic messages added for consistency with the sibling wait tests,
    // which all explain what failed and print the observed value.
    assert!(
        state == "exited" || state == "killed" || state == "failed",
        "state must be terminal with --wait --forever; got: {state:?}"
    );
    assert!(
        v.get("final_snapshot").is_some(),
        "final_snapshot must be present in run --wait --forever response; got: {v}"
    );
}
/// `--until` and `--forever` are mutually exclusive; supplying both must be a
/// usage error (exit code 2, empty stdout).
#[test]
fn run_wait_rejects_until_and_forever_together() {
    let harness = TestHarness::new();
    let args = [
        "run", "--wait", "--until", "100", "--forever", "echo", "invalid",
    ];
    assert_usage_error(&args, Some(harness.root()));
}
/// `--until` is only meaningful alongside `--wait`; on its own it must be
/// rejected as a usage error.
#[test]
fn run_rejects_until_without_wait() {
    let harness = TestHarness::new();
    let args = ["run", "--until", "100", "echo", "invalid"];
    assert_usage_error(&args, Some(harness.root()));
}
/// `--forever` without `--wait` must likewise be rejected as a usage error.
#[test]
fn run_rejects_forever_without_wait() {
    let harness = TestHarness::new();
    let args = ["run", "--forever", "echo", "invalid"];
    assert_usage_error(&args, Some(harness.root()));
}
/// A plain `run` (no `--wait`) must not leak wait-only fields into the
/// response: `finished_at`, `final_snapshot`, and `exit_code` stay absent.
#[test]
fn run_without_wait_omits_wait_fields() {
    let harness = TestHarness::new();
    let v = harness.run(&["run", "--snapshot-after", "0", "echo", "no_wait"]);
    assert_envelope(&v, "run", true);
    assert!(
        v.get("finished_at").is_none(),
        "finished_at must NOT be present when --wait is not used; got: {v}"
    );
    assert!(
        v.get("final_snapshot").is_none(),
        "final_snapshot must NOT be present when --wait is not used; got: {v}"
    );
    assert!(
        v.get("exit_code").is_none(),
        "exit_code must NOT be present when --wait is not used; got: {v}"
    );
}
fn run_cmd_with_root_and_cwd(
args: &[&str],
root: Option<&str>,
cwd: Option<&std::path::Path>,
) -> (serde_json::Value, std::process::ExitStatus) {
let bin = binary();
let mut cmd = std::process::Command::new(&bin);
cmd.args(args);
if let Some(r) = root {
cmd.env("AGENT_EXEC_ROOT", r);
}
if let Some(d) = cwd {
cmd.current_dir(d);
}
let output = cmd.output().expect("run binary");
let stdout = String::from_utf8_lossy(&output.stdout);
let stderr = String::from_utf8_lossy(&output.stderr);
let value = if stdout.trim().is_empty() {
serde_json::json!({})
} else {
serde_json::from_str(stdout.trim()).unwrap_or_else(|e| {
panic!(
"stdout is not valid JSON: {e}\nstdout: {stdout}\nstderr: {stderr}\nargs: {args:?}"
)
})
};
(value, output.status)
}
// By default `list` filters jobs by the caller's current working directory:
// jobs launched from dir_a and dir_b must not leak into each other's listings.
#[test]
fn list_default_filters_by_caller_cwd() {
    let h = TestHarness::new();
    let dir_a = tempfile::tempdir().expect("create dir_a");
    let dir_b = tempfile::tempdir().expect("create dir_b");
    let (va, _) = run_cmd_with_root_and_cwd(
        &["run", "--snapshot-after", "0", "echo", "job_from_a"],
        Some(h.root()),
        Some(dir_a.path()),
    );
    let job_a_id = va["job_id"]
        .as_str()
        .expect("job_id missing for A")
        .to_string();
    // Brief pause between launches — presumably to keep the two jobs'
    // timestamps/ids distinct; TODO confirm against the id scheme.
    std::thread::sleep(std::time::Duration::from_millis(10));
    let (vb, _) = run_cmd_with_root_and_cwd(
        &["run", "--snapshot-after", "0", "echo", "job_from_b"],
        Some(h.root()),
        Some(dir_b.path()),
    );
    let job_b_id = vb["job_id"]
        .as_str()
        .expect("job_id missing for B")
        .to_string();
    // Listing from dir_a must contain A's job and exclude B's.
    let (list_a, _) = run_cmd_with_root_and_cwd(&["list"], Some(h.root()), Some(dir_a.path()));
    assert_envelope(&list_a, "list", true);
    let jobs_a = list_a["jobs"].as_array().expect("jobs missing");
    let has_a = jobs_a
        .iter()
        .any(|j| j["job_id"].as_str() == Some(&job_a_id));
    let has_b_in_a = jobs_a
        .iter()
        .any(|j| j["job_id"].as_str() == Some(&job_b_id));
    assert!(
        has_a,
        "Job A should appear when listing from dir_a; list: {list_a}"
    );
    assert!(
        !has_b_in_a,
        "Job B should NOT appear when listing from dir_a; list: {list_a}"
    );
    // And symmetrically when listing from dir_b.
    let (list_b, _) = run_cmd_with_root_and_cwd(&["list"], Some(h.root()), Some(dir_b.path()));
    assert_envelope(&list_b, "list", true);
    let jobs_b = list_b["jobs"].as_array().expect("jobs missing");
    let has_b = jobs_b
        .iter()
        .any(|j| j["job_id"].as_str() == Some(&job_b_id));
    let has_a_in_b = jobs_b
        .iter()
        .any(|j| j["job_id"].as_str() == Some(&job_a_id));
    assert!(
        has_b,
        "Job B should appear when listing from dir_b; list: {list_b}"
    );
    assert!(
        !has_a_in_b,
        "Job A should NOT appear when listing from dir_b; list: {list_b}"
    );
}
// `list --cwd <dir>` must filter by the given directory regardless of the
// caller's own working directory (no cwd is set for the list invocation).
#[test]
fn list_cwd_flag_filters_by_specified_directory() {
    let h = TestHarness::new();
    let dir_a = tempfile::tempdir().expect("create dir_a");
    let dir_b = tempfile::tempdir().expect("create dir_b");
    let (va, _) = run_cmd_with_root_and_cwd(
        &["run", "--snapshot-after", "0", "echo", "job_a"],
        Some(h.root()),
        Some(dir_a.path()),
    );
    let job_a_id = va["job_id"].as_str().expect("job_id missing").to_string();
    // Brief pause — presumably to keep the two jobs distinguishable; TODO
    // confirm against the id scheme.
    std::thread::sleep(std::time::Duration::from_millis(10));
    let (vb, _) = run_cmd_with_root_and_cwd(
        &["run", "--snapshot-after", "0", "echo", "job_b"],
        Some(h.root()),
        Some(dir_b.path()),
    );
    let job_b_id = vb["job_id"].as_str().expect("job_id missing").to_string();
    let dir_a_str = dir_a.path().to_str().expect("dir_a path is utf-8");
    // List with an explicit --cwd and no inherited working directory.
    let (list_v, _) =
        run_cmd_with_root_and_cwd(&["list", "--cwd", dir_a_str], Some(h.root()), None);
    assert_envelope(&list_v, "list", true);
    let jobs = list_v["jobs"].as_array().expect("jobs missing");
    let has_a = jobs.iter().any(|j| j["job_id"].as_str() == Some(&job_a_id));
    let has_b = jobs.iter().any(|j| j["job_id"].as_str() == Some(&job_b_id));
    assert!(
        has_a,
        "Job A should appear with --cwd dir_a; list: {list_v}"
    );
    assert!(
        !has_b,
        "Job B should NOT appear with --cwd dir_a; list: {list_v}"
    );
}
// `list --all` must disable the cwd filter: jobs launched from both dir_a and
// dir_b appear even though the listing runs from dir_a.
#[test]
fn list_all_flag_disables_cwd_filter() {
    let h = TestHarness::new();
    let dir_a = tempfile::tempdir().expect("create dir_a");
    let dir_b = tempfile::tempdir().expect("create dir_b");
    let (va, _) = run_cmd_with_root_and_cwd(
        &["run", "--snapshot-after", "0", "echo", "job_a"],
        Some(h.root()),
        Some(dir_a.path()),
    );
    let job_a_id = va["job_id"].as_str().expect("job_id missing").to_string();
    // Brief pause — presumably to keep the two jobs distinguishable; TODO
    // confirm against the id scheme.
    std::thread::sleep(std::time::Duration::from_millis(10));
    let (vb, _) = run_cmd_with_root_and_cwd(
        &["run", "--snapshot-after", "0", "echo", "job_b"],
        Some(h.root()),
        Some(dir_b.path()),
    );
    let job_b_id = vb["job_id"].as_str().expect("job_id missing").to_string();
    let (list_v, _) =
        run_cmd_with_root_and_cwd(&["list", "--all"], Some(h.root()), Some(dir_a.path()));
    assert_envelope(&list_v, "list", true);
    let jobs = list_v["jobs"].as_array().expect("jobs missing");
    let has_a = jobs.iter().any(|j| j["job_id"].as_str() == Some(&job_a_id));
    let has_b = jobs.iter().any(|j| j["job_id"].as_str() == Some(&job_b_id));
    assert!(has_a, "Job A should appear with --all; list: {list_v}");
    assert!(has_b, "Job B should appear with --all; list: {list_v}");
}
/// `list --all --cwd` is contradictory and must fail with usage exit code 2.
#[test]
fn list_all_and_cwd_conflict_exits_with_code_2() {
    let harness = TestHarness::new();
    let output = std::process::Command::new(binary())
        .env("AGENT_EXEC_ROOT", harness.root())
        .args(["list", "--all", "--cwd", "/tmp"])
        .output()
        .expect("run binary");
    assert_eq!(
        output.status.code(),
        Some(2),
        "expected exit code 2 when --all and --cwd are both supplied; \
stderr: {}",
        String::from_utf8_lossy(&output.stderr)
    );
}
/// `schema` with no root configured must still return a success envelope.
#[test]
fn schema_returns_json_envelope() {
    let response = run_cmd_with_root(&["schema"], None);
    assert_envelope(&response, "schema", true);
}
/// The schema response must advertise the draft-07 JSON Schema dialect.
#[test]
fn schema_response_has_schema_format() {
    let response = run_cmd_with_root(&["schema"], None);
    assert_envelope(&response, "schema", true);
    let schema_format = response["schema_format"]
        .as_str()
        .expect("schema_format missing from schema response");
    assert_eq!(
        schema_format, "json-schema-draft-07",
        "schema_format must be 'json-schema-draft-07'; got: {schema_format}"
    );
}
/// The schema response must carry a non-empty JSON object under `schema`.
#[test]
fn schema_response_has_schema_object() {
    let response = run_cmd_with_root(&["schema"], None);
    assert_envelope(&response, "schema", true);
    let schema = response
        .get("schema")
        .expect("schema field missing from schema response");
    assert!(
        schema.is_object(),
        "schema field must be a JSON object; got: {schema}"
    );
    // The unwrap is safe: the assertion above guarantees an object.
    assert!(
        !schema.as_object().unwrap().is_empty(),
        "schema field must not be empty; got: {schema}"
    );
}
/// The schema response must include a non-empty `generated_at` timestamp.
#[test]
fn schema_response_has_generated_at() {
    let response = run_cmd_with_root(&["schema"], None);
    assert_envelope(&response, "schema", true);
    let generated_at = response["generated_at"]
        .as_str()
        .expect("generated_at missing from schema response");
    assert!(
        !generated_at.is_empty(),
        "generated_at must not be empty; got: {generated_at:?}"
    );
}
/// `schema` must print exactly one line on stdout, and that line must parse
/// as a single JSON object.
#[test]
fn schema_stdout_is_single_json_object() {
    let output = std::process::Command::new(binary())
        .args(["schema"])
        .output()
        .expect("run binary");
    let stdout = String::from_utf8_lossy(&output.stdout);
    let lines: Vec<&str> = stdout.trim().lines().collect();
    assert_eq!(
        lines.len(),
        1,
        "schema stdout should contain exactly 1 line (JSON), got {}: {:?}",
        lines.len(),
        lines
    );
    let parsed: serde_json::Value =
        serde_json::from_str(lines[0]).expect("schema stdout line is not valid JSON");
    assert!(
        parsed.is_object(),
        "schema stdout JSON is not an object: {parsed}"
    );
}
// `install-skills --source self` must install the built-in "agent-exec" skill
// under `.agents/skills/`, including SKILL.md and its reference documents,
// and record it in `.agents/.skill-lock.json`.
#[test]
fn install_skills_self_source_succeeds() {
    let tmp = tempfile::tempdir().expect("create tempdir");
    let agents_dir = tmp.path().join(".agents");
    let bin = binary();
    let output = std::process::Command::new(&bin)
        .args(["install-skills", "--source", "self"])
        .current_dir(tmp.path())
        .output()
        .expect("run binary");
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    assert!(
        !stdout.trim().is_empty(),
        "stdout is empty (stderr: {stderr})"
    );
    let v: serde_json::Value =
        serde_json::from_str(stdout.trim()).expect("stdout must be valid JSON");
    assert_envelope(&v, "install_skills", true);
    // Response must describe the installed skill.
    let skills = v["skills"].as_array().expect("skills must be an array");
    assert!(!skills.is_empty(), "skills array must not be empty");
    assert_eq!(
        skills[0]["name"].as_str().unwrap_or(""),
        "agent-exec",
        "skills[0].name must be 'agent-exec'; got: {v}"
    );
    assert_eq!(
        skills[0]["source_type"].as_str().unwrap_or(""),
        "self",
        "skills[0].source_type must be 'self'; got: {v}"
    );
    assert!(
        skills[0]["path"].as_str().is_some(),
        "skills[0].path must be present; got: {v}"
    );
    assert!(
        v["lock_file_path"].as_str().is_some(),
        "lock_file_path must be present; got: {v}"
    );
    // On-disk layout: the skill directory plus its bundled reference docs.
    let skill_dir = agents_dir.join("skills").join("agent-exec");
    assert!(
        skill_dir.exists(),
        "skill directory must exist at {}",
        skill_dir.display()
    );
    assert!(
        skill_dir.join("SKILL.md").exists(),
        "SKILL.md must exist inside the installed skill directory"
    );
    assert!(
        skill_dir
            .join("references")
            .join("cli-contract.md")
            .exists(),
        "cli-contract.md must exist inside the installed skill directory"
    );
    assert!(
        skill_dir
            .join("references")
            .join("completion-events.md")
            .exists(),
        "completion-events.md must exist inside the installed skill directory"
    );
    assert!(
        skill_dir.join("references").join("openclaw.md").exists(),
        "openclaw.md must exist inside the installed skill directory"
    );
    // Lock file must exist and record the same skill.
    let lock_path = agents_dir.join(".skill-lock.json");
    assert!(
        lock_path.exists(),
        "lock file must exist at {}",
        lock_path.display()
    );
    let lock_content = std::fs::read_to_string(&lock_path).expect("read lock file");
    let lock: serde_json::Value =
        serde_json::from_str(&lock_content).expect("lock file must be valid JSON");
    let lock_skills = lock["skills"]
        .as_array()
        .expect("lock skills must be an array");
    assert!(!lock_skills.is_empty(), "lock skills must not be empty");
    assert_eq!(
        lock_skills[0]["name"].as_str().unwrap_or(""),
        "agent-exec",
        "lock skills[0].name must be 'agent-exec'"
    );
    assert!(
        lock_skills[0]["path"].as_str().is_some(),
        "lock skills[0].path must be present; got: {lock}"
    );
    assert!(
        lock_skills[0]["source_type"].as_str().is_some(),
        "lock skills[0].source_type must be present; got: {lock}"
    );
}
// `install-skills --source local:<path>` must install a skill from an
// arbitrary local directory into `.agents/skills/<name>` and record it in the
// lock file.
#[test]
fn install_skills_local_source_succeeds() {
    // Source fixture: a directory containing only a SKILL.md.
    let tmp = tempfile::tempdir().expect("create tempdir");
    let fake_skill_dir = tmp.path().join("my-fake-skill");
    std::fs::create_dir_all(&fake_skill_dir).expect("create fake skill dir");
    std::fs::write(
        fake_skill_dir.join("SKILL.md"),
        "# Fake Skill\nTest content.",
    )
    .expect("write fake SKILL.md");
    let source_arg = format!("local:{}", fake_skill_dir.display());
    // Install into a separate root so source and destination do not overlap.
    let install_root = tempfile::tempdir().expect("create install root");
    let agents_dir = install_root.path().join(".agents");
    let bin = binary();
    let output = std::process::Command::new(&bin)
        .args(["install-skills", "--source", &source_arg])
        .current_dir(install_root.path())
        .output()
        .expect("run binary");
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    assert!(
        !stdout.trim().is_empty(),
        "stdout is empty (stderr: {stderr})"
    );
    let v: serde_json::Value =
        serde_json::from_str(stdout.trim()).expect("stdout must be valid JSON");
    assert_envelope(&v, "install_skills", true);
    let skills = v["skills"].as_array().expect("skills must be an array");
    assert!(!skills.is_empty(), "skills array must not be empty");
    assert_eq!(
        skills[0]["name"].as_str().unwrap_or(""),
        "my-fake-skill",
        "skills[0].name must be 'my-fake-skill'; got: {v}"
    );
    assert!(
        skills[0]["source_type"].as_str().is_some(),
        "skills[0].source_type must be present; got: {v}"
    );
    assert!(
        skills[0]["path"].as_str().is_some(),
        "skills[0].path must be present; got: {v}"
    );
    // The destination may be a directory or a symlink; `symlink_metadata`
    // succeeds for a symlink even when `exists()` would follow a broken link.
    let skill_dest = agents_dir.join("skills").join("my-fake-skill");
    let exists = skill_dest.exists() || skill_dest.symlink_metadata().is_ok();
    assert!(
        exists,
        "installed skill directory must exist at {}",
        skill_dest.display()
    );
    let lock_path = agents_dir.join(".skill-lock.json");
    assert!(
        lock_path.exists(),
        "lock file must exist at {}",
        lock_path.display()
    );
    let lock_content = std::fs::read_to_string(&lock_path).expect("read lock file");
    let lock: serde_json::Value =
        serde_json::from_str(&lock_content).expect("lock file must be valid JSON");
    let lock_skills = lock["skills"]
        .as_array()
        .expect("lock skills must be an array");
    assert!(!lock_skills.is_empty(), "lock skills must not be empty");
    assert_eq!(
        lock_skills[0]["name"].as_str().unwrap_or(""),
        "my-fake-skill",
        "lock skills[0].name must be 'my-fake-skill'"
    );
    assert!(
        lock_skills[0]["path"].as_str().is_some(),
        "lock skills[0].path must be present; got: {lock}"
    );
    assert!(
        lock_skills[0]["source_type"].as_str().is_some(),
        "lock skills[0].source_type must be present; got: {lock}"
    );
}
/// An unsupported source scheme (here `ftp://`) must exit with status 1 and
/// emit an error envelope with code `unknown_source_scheme`.
#[test]
fn install_skills_unknown_source_scheme_returns_error() {
    let tmp = tempfile::tempdir().expect("create tempdir");
    let output = std::process::Command::new(binary())
        .args(["install-skills", "--source", "ftp://example.com/skill"])
        .current_dir(tmp.path())
        .output()
        .expect("run binary");
    assert_eq!(
        output.status.code(),
        Some(1),
        "exit code must be 1 for unknown source scheme"
    );
    let stdout = String::from_utf8_lossy(&output.stdout);
    let v: serde_json::Value =
        serde_json::from_str(stdout.trim()).expect("stdout must be valid JSON");
    assert_envelope(&v, "error", false);
    assert_eq!(
        v["error"]["code"].as_str().unwrap_or(""),
        "unknown_source_scheme",
        "error.code must be 'unknown_source_scheme'; got: {v}"
    );
}
// `--notify-file` must append exactly one NDJSON `job.finished` event when
// the job terminates, carrying the job id, terminal state, log paths, and a
// finish timestamp.
#[test]
fn notify_file_sink_appends_ndjson_on_job_finish() {
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("create tempdir");
    let events_file = tmp_dir.path().join("events.ndjson");
    let events_file_str = events_file.to_str().unwrap();
    let v = h.run(&[
        "run",
        "--notify-file",
        events_file_str,
        "--wait",
        "--",
        "echo",
        "notify_test",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id missing").to_string();
    assert_eq!(
        v["state"].as_str().unwrap_or(""),
        "exited",
        "state must be exited with --wait"
    );
    // Give the notification sink time to flush — it appears to run
    // asynchronously relative to the run response; TODO confirm.
    std::thread::sleep(std::time::Duration::from_millis(300));
    assert!(
        events_file.exists(),
        "notify-file {events_file_str} was not created"
    );
    let content = std::fs::read_to_string(&events_file).expect("read events file");
    let lines: Vec<&str> = content.lines().filter(|l| !l.trim().is_empty()).collect();
    assert_eq!(
        lines.len(),
        1,
        "expected exactly 1 NDJSON line, got {}",
        lines.len()
    );
    let event: serde_json::Value =
        serde_json::from_str(lines[0]).expect("NDJSON line must be valid JSON");
    assert_eq!(
        event["event_type"].as_str().unwrap_or(""),
        "job.finished",
        "event_type must be 'job.finished'"
    );
    assert_eq!(
        event["job_id"].as_str().unwrap_or(""),
        job_id,
        "event job_id must match"
    );
    assert_eq!(
        event["state"].as_str().unwrap_or(""),
        "exited",
        "event state must be exited"
    );
    assert!(
        event.get("stdout_log_path").is_some(),
        "stdout_log_path must be present"
    );
    assert!(
        event.get("stderr_log_path").is_some(),
        "stderr_log_path must be present"
    );
    assert!(
        event.get("finished_at").is_some(),
        "finished_at must be present"
    );
}
// `--notify-command` must pipe the completion event JSON into the hook's
// stdin; the hook here just copies stdin to a file we can inspect.
#[test]
fn notify_command_sink_receives_event_via_stdin() {
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("create tempdir");
    let captured_file = tmp_dir.path().join("captured.json");
    let captured_str = captured_file.to_str().unwrap();
    // Hook copies its stdin (the event) into captured.json.
    let hook_cmd = format!("cat > {captured_str}");
    let v = h.run(&[
        "run",
        "--notify-command",
        &hook_cmd,
        "--wait",
        "--",
        "echo",
        "cmd_sink_test",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id missing").to_string();
    // Give the hook time to run — notification dispatch appears to be
    // asynchronous relative to the run response; TODO confirm.
    std::thread::sleep(std::time::Duration::from_millis(300));
    assert!(
        captured_file.exists(),
        "captured file not created by hook command"
    );
    let content = std::fs::read_to_string(&captured_file).expect("read captured file");
    let event: serde_json::Value =
        serde_json::from_str(content.trim()).expect("captured content must be valid JSON");
    assert_eq!(
        event["event_type"].as_str().unwrap_or(""),
        "job.finished",
        "event_type must be 'job.finished'"
    );
    assert_eq!(
        event["job_id"].as_str().unwrap_or(""),
        job_id,
        "event job_id must match"
    );
}
// A failing notification hook must not affect the job itself: the job still
// reports `exited`, and the failure is recorded in completion_event.json's
// delivery_results instead.
#[test]
fn notify_failure_does_not_change_job_state() {
    let h = TestHarness::new();
    // Deliberately non-existent hook binary so delivery fails.
    let hook_cmd = "/no/such/binary/agent_exec_test";
    let v = h.run(&[
        "run",
        "--notify-command",
        hook_cmd,
        "--wait",
        "--",
        "echo",
        "failure_test",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id missing").to_string();
    assert_eq!(
        v["state"].as_str().unwrap_or(""),
        "exited",
        "job state must be exited despite notification failure"
    );
    // Give notification dispatch time to finish and persist its results.
    std::thread::sleep(std::time::Duration::from_millis(300));
    let sv = h.run(&["status", &job_id]);
    assert_envelope(&sv, "status", true);
    assert_eq!(
        sv["state"].as_str().unwrap_or(""),
        "exited",
        "status must return exited; got: {sv}"
    );
    // Inspect the persisted completion event under the job's directory.
    let root = h.root();
    let completion_event_path = format!("{root}/{job_id}/completion_event.json");
    let event_raw = std::fs::read_to_string(&completion_event_path)
        .expect("completion_event.json must exist after notification dispatch");
    let event: serde_json::Value =
        serde_json::from_str(&event_raw).expect("completion_event.json must be valid JSON");
    assert_eq!(
        event["state"].as_str().unwrap_or(""),
        "exited",
        "completion_event state must be exited"
    );
    let results = event["delivery_results"]
        .as_array()
        .expect("delivery_results must be an array");
    assert!(!results.is_empty(), "delivery_results must be non-empty");
    // `unwrap_or(true)` makes a missing `success` field count as a failure of
    // this assertion, not a silent pass.
    assert!(
        !results[0]["success"].as_bool().unwrap_or(true),
        "delivery must have failed"
    );
    assert_eq!(
        results[0]["sink_type"].as_str().unwrap_or(""),
        "command",
        "sink_type must be 'command'"
    );
}
/// Sanity check: with no wrapper flags, `run --wait echo` succeeds through
/// the default shell wrapper.
#[test]
fn shell_wrapper_default_behavior() {
    let harness = TestHarness::new();
    let v = harness.run(&["run", "--wait", "--", "echo", "hello_shell_wrapper"]);
    assert_envelope(&v, "run", true);
    assert_eq!(
        v["state"].as_str().unwrap_or(""),
        "exited",
        "job must exit successfully with default shell wrapper"
    );
}
/// `--shell-wrapper` supplied on the CLI must be honored; the notify-command
/// hook writing a marker file proves the wrapped pipeline executed.
#[test]
fn shell_wrapper_cli_override_with_notify_command() {
    let harness = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("create tempdir");
    let captured = tmp_dir.path().join("captured.txt");
    let captured_str = captured.to_str().unwrap();
    let hook_cmd = format!("echo wrapper_used > {captured_str}");
    let args = [
        "run",
        "--shell-wrapper",
        "sh -lc",
        "--notify-command",
        hook_cmd.as_str(),
        "--wait",
        "--",
        "echo",
        "sw_test",
    ];
    let v = harness.run(&args);
    assert_envelope(&v, "run", true);
    assert_eq!(v["state"].as_str().unwrap_or(""), "exited");
    // Allow the hook time to run before inspecting its output file.
    std::thread::sleep(std::time::Duration::from_millis(300));
    assert!(captured.exists(), "hook command output file must exist");
    let content = std::fs::read_to_string(&captured).unwrap();
    assert!(content.contains("wrapper_used"), "hook must have run");
}
/// A `[shell]` table in a `--config` TOML file must be accepted as the shell
/// wrapper source and the job must still complete.
#[test]
fn shell_wrapper_config_file_override() {
    let harness = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("create tempdir");
    let config_path = tmp_dir.path().join("config.toml");
    let config_toml = "[shell]\nunix = [\"sh\", \"-lc\"]\nwindows = [\"cmd\", \"/C\"]\n";
    std::fs::write(&config_path, config_toml).unwrap();
    let v = harness.run(&[
        "run",
        "--config",
        config_path.to_str().unwrap(),
        "--wait",
        "--",
        "echo",
        "config_test",
    ]);
    assert_envelope(&v, "run", true);
    assert_eq!(
        v["state"].as_str().unwrap_or(""),
        "exited",
        "job must exit with config file shell wrapper"
    );
}
/// When both a config file and `--shell-wrapper` are given, the CLI flag wins
/// and the job still runs to completion.
#[test]
fn shell_wrapper_cli_takes_precedence_over_config() {
    let harness = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("create tempdir");
    let config_path = tmp_dir.path().join("config.toml");
    let config_toml = "[shell]\nunix = [\"sh\", \"-lc\"]\nwindows = [\"cmd\", \"/C\"]\n";
    std::fs::write(&config_path, config_toml).unwrap();
    let v = harness.run(&[
        "run",
        "--config",
        config_path.to_str().unwrap(),
        "--shell-wrapper",
        "sh -lc",
        "--wait",
        "--",
        "echo",
        "precedence_test",
    ]);
    assert_envelope(&v, "run", true);
    assert_eq!(
        v["state"].as_str().unwrap_or(""),
        "exited",
        "job must succeed when CLI wrapper overrides config"
    );
}
/// A config file that is not valid TOML must yield an error envelope rather
/// than a successful run.
#[test]
fn shell_wrapper_invalid_config_file_fails() {
    let harness = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("create tempdir");
    let config_path = tmp_dir.path().join("config.toml");
    std::fs::write(&config_path, "this is not valid toml {{{ ").unwrap();
    let args = [
        "run",
        "--config",
        config_path.to_str().unwrap(),
        "--wait",
        "--",
        "echo",
        "should_fail",
    ];
    let v = harness.run(&args);
    assert_envelope(&v, "error", false);
}
// The configured shell wrapper must be used for the notify-command hook as
// well as the job itself; the hook's marker file proves it ran under the
// wrapper-enabled pipeline.
#[test]
fn shell_wrapper_shared_between_run_and_notify_command() {
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("create tempdir");
    let captured = tmp_dir.path().join("shared_wrapper.txt");
    let captured_str = captured.to_str().unwrap();
    // Hook writes a marker file when executed.
    let hook_cmd = format!("echo shared_wrapper_ran > {captured_str}");
    let v = h.run(&[
        "run",
        "--shell-wrapper",
        "sh -lc",
        "--notify-command",
        &hook_cmd,
        "--wait",
        "--",
        "echo",
        "shared_test",
    ]);
    assert_envelope(&v, "run", true);
    assert_eq!(v["state"].as_str().unwrap_or(""), "exited");
    // Allow the hook time to run before inspecting its output file.
    std::thread::sleep(std::time::Duration::from_millis(300));
    assert!(
        captured.exists(),
        "notify-command must have run using the configured wrapper"
    );
    let content = std::fs::read_to_string(&captured).unwrap();
    assert!(
        content.contains("shared_wrapper_ran"),
        "notify-command output must confirm wrapper execution"
    );
}
/// A single non-flag argument containing shell operators (`&&`) must be
/// executed as a command string through the shell wrapper.
#[test]
fn shell_wrapper_applied_to_run_command_string() {
    let harness = TestHarness::new();
    let args = [
        "run",
        "--wait",
        "--",
        "echo shell_string_ran && echo second",
    ];
    let v = harness.run(&args);
    assert_envelope(&v, "run", true);
    assert_eq!(
        v["state"].as_str().unwrap_or(""),
        "exited",
        "shell command string must execute successfully through the wrapper"
    );
}
/// The wrapper string must survive the run → supervise hand-off intact (no
/// re-splitting or quoting damage), so the job still exits cleanly.
#[test]
fn shell_wrapper_argv_fidelity_across_run_supervise() {
    let harness = TestHarness::new();
    let args = [
        "run",
        "--shell-wrapper",
        "sh -lc",
        "--wait",
        "--",
        "echo",
        "fidelity_ok",
    ];
    let v = harness.run(&args);
    assert_envelope(&v, "run", true);
    assert_eq!(
        v["state"].as_str().unwrap_or(""),
        "exited",
        "wrapper must be passed to supervisor with argv fidelity"
    );
}
/// Writes a synthetic job directory under `root` for gc tests: `meta.json`,
/// `state.json`, and a small `stdout.log` so the directory has reclaimable
/// bytes. `finished_at` is only written into the state when provided (jobs
/// still running have none); `updated_at` doubles as the created/started
/// timestamp.
fn write_fake_job(
    root: &str,
    job_id: &str,
    status: &str,
    finished_at: Option<&str>,
    updated_at: &str,
) {
    let job_dir = std::path::Path::new(root).join(job_id);
    std::fs::create_dir_all(&job_dir).unwrap();
    // Minimal metadata matching what the binary expects to read back —
    // presumably schema 0.1; TODO confirm against the production writer.
    let meta = serde_json::json!({
        "job": { "id": job_id },
        "schema_version": "0.1",
        "command": ["echo", "test"],
        "created_at": updated_at,
        "root": root,
        "env_keys": [],
        "env_vars": [],
        "mask": []
    });
    std::fs::write(
        job_dir.join("meta.json"),
        serde_json::to_string_pretty(&meta).unwrap(),
    )
    .unwrap();
    // State document: only "exited" jobs get a concrete exit code.
    let mut state_obj = serde_json::json!({
        "job": {
            "id": job_id,
            "status": status,
            "started_at": updated_at
        },
        "result": {
            "exit_code": if status == "exited" { serde_json::json!(0) } else { serde_json::Value::Null },
            "signal": serde_json::Value::Null,
            "duration_ms": serde_json::Value::Null
        },
        "updated_at": updated_at
    });
    if let Some(fa) = finished_at {
        state_obj["finished_at"] = serde_json::json!(fa);
    }
    std::fs::write(
        job_dir.join("state.json"),
        serde_json::to_string_pretty(&state_obj).unwrap(),
    )
    .unwrap();
    // Non-empty log so gc has bytes to count as freed.
    std::fs::write(job_dir.join("stdout.log"), b"some output").unwrap();
}
/// Shared assertions for `gc` responses: correct envelope, matching `dry_run`
/// flag, and presence of the root/older_than/older_than_source/jobs fields.
fn assert_gc_envelope(v: &serde_json::Value, dry_run: bool) {
    assert_envelope(v, "gc", true);
    // Default to the *opposite* value so a missing `dry_run` field fails the
    // equality check instead of silently passing.
    assert_eq!(
        v["dry_run"].as_bool().unwrap_or(!dry_run),
        dry_run,
        "dry_run mismatch"
    );
    assert!(v["root"].as_str().is_some(), "root field missing");
    assert!(
        v["older_than"].as_str().is_some(),
        "older_than field missing"
    );
    assert!(
        v["older_than_source"].as_str().is_some(),
        "older_than_source field missing"
    );
    assert!(v["jobs"].is_array(), "jobs must be an array");
}
/// `gc` over an empty root must succeed with nothing deleted and zero bytes
/// freed.
#[test]
fn gc_empty_root_returns_ok() {
    let harness = TestHarness::new();
    let v = harness.run(&["gc", "--older-than", "1d"]);
    assert_gc_envelope(&v, false);
    // `unwrap_or(1)` makes a missing counter fail the equality check.
    assert_eq!(v["deleted"].as_u64().unwrap_or(1), 0);
    assert_eq!(v["freed_bytes"].as_u64().unwrap_or(1), 0);
}
// Without `--older-than`, gc must apply a 30d default window (reported with
// source "default") and delete a terminal job finished in 2020.
#[test]
fn gc_uses_default_30d_window() {
    let h = TestHarness::new();
    // A terminal job finished far outside any reasonable window.
    write_fake_job(
        h.root(),
        "old-job-01",
        "exited",
        Some("2020-01-01T00:00:00Z"),
        "2020-01-01T00:00:00Z",
    );
    let v = h.run(&["gc"]);
    assert_gc_envelope(&v, false);
    assert_eq!(
        v["older_than_source"].as_str().unwrap_or(""),
        "default",
        "should report default source"
    );
    assert_eq!(
        v["older_than"].as_str().unwrap_or(""),
        "30d",
        "should report 30d as default"
    );
    assert_eq!(
        v["deleted"].as_u64().unwrap_or(0),
        1,
        "old terminal job must be deleted"
    );
    assert!(
        v["freed_bytes"].as_u64().unwrap_or(0) > 0,
        "freed_bytes must be > 0"
    );
    let job_path = std::path::Path::new(h.root()).join("old-job-01");
    assert!(!job_path.exists(), "job directory must be deleted");
}
// gc must delete old jobs only in terminal states (exited/killed/failed);
// a running job — however old — is preserved and reported as skipped.
#[test]
fn gc_deletes_only_terminal_jobs() {
    let h = TestHarness::new();
    let old = "2020-01-01T00:00:00Z";
    write_fake_job(h.root(), "exited-old", "exited", Some(old), old);
    write_fake_job(h.root(), "killed-old", "killed", Some(old), old);
    write_fake_job(h.root(), "failed-old", "failed", Some(old), old);
    // Running job: no finished_at, must survive gc.
    write_fake_job(h.root(), "running-job", "running", None, old);
    let v = h.run(&["gc", "--older-than", "7d"]);
    assert_gc_envelope(&v, false);
    assert_eq!(
        v["deleted"].as_u64().unwrap_or(0),
        3,
        "three terminal jobs must be deleted"
    );
    let running_path = std::path::Path::new(h.root()).join("running-job");
    assert!(running_path.exists(), "running job must be preserved");
    assert!(!std::path::Path::new(h.root()).join("exited-old").exists());
    assert!(!std::path::Path::new(h.root()).join("killed-old").exists());
    assert!(!std::path::Path::new(h.root()).join("failed-old").exists());
    // The skipped running job must still be listed, with an explicit reason.
    let jobs = v["jobs"].as_array().unwrap();
    let running_entry = jobs
        .iter()
        .find(|j| j["job_id"].as_str().unwrap_or("") == "running-job");
    assert!(
        running_entry.is_some(),
        "running job must appear in jobs array"
    );
    let running_entry = running_entry.unwrap();
    assert_eq!(running_entry["action"].as_str().unwrap_or(""), "skipped");
    assert_eq!(running_entry["reason"].as_str().unwrap_or(""), "running");
}
#[test]
fn gc_dry_run_preserves_directories() {
    // --dry-run reports what would be reclaimed without touching disk.
    let harness = TestHarness::new();
    let stamp = "2020-01-01T00:00:00Z";
    write_fake_job(harness.root(), "old-exited", "exited", Some(stamp), stamp);
    let resp = harness.run(&["gc", "--older-than", "7d", "--dry-run"]);
    assert_gc_envelope(&resp, true);
    assert_eq!(
        resp["deleted"].as_u64().unwrap_or(1),
        0,
        "dry-run must not delete"
    );
    assert!(
        resp["freed_bytes"].as_u64().unwrap_or(0) > 0,
        "freed_bytes must report potential reclaim"
    );
    assert!(
        std::path::Path::new(harness.root()).join("old-exited").exists(),
        "directory must be preserved in dry-run"
    );
    let entry = resp["jobs"]
        .as_array()
        .unwrap()
        .iter()
        .find(|j| j["job_id"].as_str().unwrap_or("") == "old-exited");
    assert!(entry.is_some());
    assert_eq!(
        entry.unwrap()["action"].as_str().unwrap_or(""),
        "would_delete"
    );
}
#[test]
fn gc_skips_jobs_without_gc_timestamp() {
    // Hand-craft a terminal job whose state.json carries null result
    // timestamps: gc must fall back to the top-level `updated_at` field
    // to age the job, and collect it.
    // NOTE(review): the test name says "skips" but the assertion expects
    // deletion via the updated_at fallback — confirm naming upstream.
    let h = TestHarness::new();
    let job_id = "no-ts-job";
    let job_dir = std::path::Path::new(h.root()).join(job_id);
    std::fs::create_dir_all(&job_dir).unwrap();
    // Minimal meta.json mirroring what the binary writes for a real job.
    let meta = serde_json::json!({
        "job": { "id": job_id },
        "schema_version": "0.1",
        "command": ["echo", "test"],
        "created_at": "2020-01-01T00:00:00Z",
        "root": h.root(),
        "env_keys": [],
        "env_vars": [],
        "mask": []
    });
    std::fs::write(
        job_dir.join("meta.json"),
        serde_json::to_string_pretty(&meta).unwrap(),
    )
    .unwrap();
    // Terminal ("killed") state with null result fields; only the
    // top-level updated_at carries a usable timestamp.
    let state = serde_json::json!({
        "job": {
            "id": job_id,
            "status": "killed",
            "started_at": "2020-01-01T00:00:00Z"
        },
        "result": {
            "exit_code": null,
            "signal": "TERM",
            "duration_ms": null
        },
        "updated_at": "2020-01-01T00:00:00Z"
    });
    std::fs::write(
        job_dir.join("state.json"),
        serde_json::to_string_pretty(&state).unwrap(),
    )
    .unwrap();
    let v = h.run(&["gc", "--older-than", "7d"]);
    assert_gc_envelope(&v, false);
    assert_eq!(
        v["deleted"].as_u64().unwrap_or(0),
        1,
        "job with only updated_at should be deleted via fallback"
    );
}
#[test]
fn gc_custom_older_than_flag_reported() {
    // An explicit --older-than value is echoed back with source "flag".
    let harness = TestHarness::new();
    let resp = harness.run(&["gc", "--older-than", "7d"]);
    assert_gc_envelope(&resp, false);
    assert_eq!(resp["older_than"].as_str().unwrap_or(""), "7d");
    assert_eq!(resp["older_than_source"].as_str().unwrap_or(""), "flag");
}
#[test]
fn gc_skips_unreadable_state() {
    // A job directory whose state.json is not valid JSON must be skipped
    // (reason: state_unreadable) and its directory left untouched.
    let h = TestHarness::new();
    let job_dir = std::path::Path::new(h.root()).join("bad-state-job");
    std::fs::create_dir_all(&job_dir).unwrap();
    // Valid meta.json so only state.json is the corrupted artifact.
    let meta = serde_json::json!({
        "job": { "id": "bad-state-job" },
        "schema_version": "0.1",
        "command": ["echo"],
        "created_at": "2020-01-01T00:00:00Z",
        "root": h.root(),
        "env_keys": [],
        "env_vars": [],
        "mask": []
    });
    std::fs::write(
        job_dir.join("meta.json"),
        serde_json::to_string_pretty(&meta).unwrap(),
    )
    .unwrap();
    // Deliberately corrupt state.json so the gc scanner cannot parse it.
    std::fs::write(job_dir.join("state.json"), b"not valid json").unwrap();
    let v = h.run(&["gc", "--older-than", "1d"]);
    assert_gc_envelope(&v, false);
    let jobs = v["jobs"].as_array().unwrap();
    // Bind the entry once instead of re-unwrapping the Option per assert.
    let entry = jobs
        .iter()
        .find(|j| j["job_id"].as_str().unwrap_or("") == "bad-state-job")
        .expect("unreadable job must appear in jobs list");
    assert_eq!(entry["action"].as_str().unwrap_or(""), "skipped");
    assert_eq!(entry["reason"].as_str().unwrap_or(""), "state_unreadable");
    assert!(
        job_dir.exists(),
        "directory with unreadable state must be preserved"
    );
}
/// Launches `run --snapshot-after 0 -- true` with the given tags attached
/// (one `--tag` flag per entry) and returns the parsed JSON response.
fn run_with_tags(h: &TestHarness, tags: &[&str]) -> serde_json::Value {
    let mut args = vec!["run", "--snapshot-after", "0"];
    args.extend(tags.iter().flat_map(|t| ["--tag", *t]));
    args.extend_from_slice(&["--", "true"]);
    h.run(&args)
}
#[test]
fn run_tag_appears_in_response() {
    // Tags passed at launch must be echoed back in the run envelope.
    let h = TestHarness::new();
    let resp = run_with_tags(&h, &["aaa", "bbb"]);
    assert_envelope(&resp, "run", true);
    let got: Vec<&str> = resp["tags"]
        .as_array()
        .expect("tags must be an array")
        .iter()
        .map(|t| t.as_str().unwrap())
        .collect();
    assert_eq!(got, vec!["aaa", "bbb"]);
}
#[test]
fn run_tag_persisted_in_meta() {
    // Tags must survive to disk: the job's meta.json records them in order.
    let h = TestHarness::new();
    let resp = run_with_tags(&h, &["hoge", "fuga"]);
    assert_envelope(&resp, "run", true);
    let job_id = resp["job_id"].as_str().unwrap();
    let meta_path = std::path::Path::new(h.root())
        .join(job_id)
        .join("meta.json");
    let meta: serde_json::Value =
        serde_json::from_slice(&std::fs::read(&meta_path).expect("meta.json must exist")).unwrap();
    let stored: Vec<&str> = meta["tags"]
        .as_array()
        .expect("tags must be in meta.json")
        .iter()
        .map(|t| t.as_str().unwrap())
        .collect();
    assert_eq!(stored, vec!["hoge", "fuga"]);
}
#[test]
fn run_tag_deduplication() {
    // Duplicate --tag values collapse; first-seen order is kept.
    let h = TestHarness::new();
    let resp = run_with_tags(&h, &["aaa", "bbb", "aaa", "ccc", "bbb"]);
    assert_envelope(&resp, "run", true);
    let got: Vec<&str> = resp["tags"]
        .as_array()
        .expect("tags must be an array")
        .iter()
        .map(|t| t.as_str().unwrap())
        .collect();
    assert_eq!(got, vec!["aaa", "bbb", "ccc"]);
}
#[test]
fn run_no_tags_returns_empty_array() {
    // With no --tag flags, the envelope still carries an (empty) tags array.
    let harness = TestHarness::new();
    let resp = harness.run(&["run", "--snapshot-after", "0", "--", "true"]);
    assert_envelope(&resp, "run", true);
    let tags = resp["tags"].as_array().expect("tags must be an array");
    assert!(tags.is_empty(), "tags must be empty when none specified");
}
#[test]
fn run_invalid_tag_is_rejected() {
    // A tag containing whitespace/punctuation is a usage error (exit 2).
    let harness = TestHarness::new();
    let args = [
        "run",
        "--snapshot-after",
        "0",
        "--tag",
        "bad tag!",
        "--",
        "true",
    ];
    assert_usage_error(&args, Some(harness.root()));
}
#[test]
fn run_wildcard_tag_is_rejected() {
    // Wildcard syntax is a filter-only feature; it is invalid at run time.
    let harness = TestHarness::new();
    let args = [
        "run",
        "--snapshot-after",
        "0",
        "--tag",
        "hoge.*",
        "--",
        "true",
    ];
    assert_usage_error(&args, Some(harness.root()));
}
#[test]
fn tag_set_replaces_tags() {
    // `tag set` fully replaces the tag list, both in the response and on disk.
    let h = TestHarness::new();
    let run_v = run_with_tags(&h, &["old"]);
    let job_id = run_v["job_id"].as_str().unwrap();
    let v = h.run(&["tag", "set", job_id, "--tag", "new1", "--tag", "new2"]);
    assert_envelope(&v, "tag_set", true);
    let returned: Vec<&str> = v["tags"]
        .as_array()
        .expect("tags must be in response")
        .iter()
        .map(|t| t.as_str().unwrap())
        .collect();
    assert_eq!(returned, vec!["new1", "new2"]);
    // The replacement must also be persisted to meta.json.
    let meta_path = std::path::Path::new(h.root())
        .join(job_id)
        .join("meta.json");
    let meta: serde_json::Value =
        serde_json::from_slice(&std::fs::read(&meta_path).unwrap()).unwrap();
    let stored: Vec<&str> = meta["tags"]
        .as_array()
        .unwrap()
        .iter()
        .map(|t| t.as_str().unwrap())
        .collect();
    assert_eq!(stored, vec!["new1", "new2"]);
}
#[test]
fn tag_set_deduplicates() {
    // Duplicate --tag values given to `tag set` collapse to one each.
    let harness = TestHarness::new();
    let run_v = run_with_tags(&harness, &[]);
    let job_id = run_v["job_id"].as_str().unwrap();
    let args = ["tag", "set", job_id, "--tag", "a", "--tag", "b", "--tag", "a"];
    let resp = harness.run(&args);
    assert_envelope(&resp, "tag_set", true);
    let tags: Vec<&str> = resp["tags"]
        .as_array()
        .unwrap()
        .iter()
        .map(|t| t.as_str().unwrap())
        .collect();
    assert_eq!(tags, vec!["a", "b"]);
}
#[test]
fn tag_set_clears_tags() {
    // `tag set` with no --tag flags empties the job's tag list.
    let harness = TestHarness::new();
    let run_resp = run_with_tags(&harness, &["keep-me"]);
    let job_id = run_resp["job_id"].as_str().unwrap();
    let resp = harness.run(&["tag", "set", job_id]);
    assert_envelope(&resp, "tag_set", true);
    let remaining = resp["tags"].as_array().unwrap();
    assert!(remaining.is_empty(), "tags must be empty after clear");
}
#[test]
fn tag_set_missing_job_returns_job_not_found() {
    // Tagging an unknown job id yields a structured job_not_found error.
    let harness = TestHarness::new();
    let resp = harness.run(&["tag", "set", "NO_SUCH_JOB_ID", "--tag", "x"]);
    assert_envelope(&resp, "error", false);
    assert_eq!(resp["error"]["code"].as_str().unwrap_or(""), "job_not_found");
}
#[test]
fn tag_set_preserves_other_meta_fields() {
    // `tag set` must rewrite only the tags field of meta.json; every other
    // field (job, command, created_at, cwd) must survive unchanged.
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--tag",
        "initial",
        "--",
        "true",
    ]);
    let job_id = run_v["job_id"].as_str().unwrap();
    // Local helper: read and parse this job's meta.json from disk
    // (deduplicates the before/after snapshot code of the original).
    let read_meta = || -> serde_json::Value {
        let path = std::path::Path::new(h.root())
            .join(job_id)
            .join("meta.json");
        serde_json::from_slice(&std::fs::read(&path).unwrap()).unwrap()
    };
    let meta_before = read_meta();
    h.run(&["tag", "set", job_id, "--tag", "after"]);
    let meta_after = read_meta();
    assert_eq!(meta_before["job"], meta_after["job"]);
    assert_eq!(meta_before["command"], meta_after["command"]);
    assert_eq!(meta_before["created_at"], meta_after["created_at"]);
    assert_eq!(meta_before["cwd"], meta_after["cwd"]);
    // Sanity check: the tags field itself DID change.
    assert_ne!(meta_before["tags"], meta_after["tags"]);
}
#[test]
fn list_jobs_include_tags() {
    // Every job summary in `list` output carries a tags array, and a job
    // launched with a tag is discoverable through it.
    let harness = TestHarness::new();
    run_with_tags(&harness, &["mytag"]);
    let resp = harness.run(&["list", "--all"]);
    assert_envelope(&resp, "list", true);
    let jobs = resp["jobs"].as_array().unwrap();
    assert!(!jobs.is_empty(), "at least one job expected");
    assert!(
        jobs.iter().all(|job| job["tags"].is_array()),
        "job summary must include tags array"
    );
    let has_tag = jobs
        .iter()
        .any(|j| j["tags"].as_array().unwrap().iter().any(|t| t == "mytag"));
    assert!(has_tag, "job with 'mytag' must appear in list");
}
#[test]
fn list_exact_tag_filter() {
    // `list --tag alpha` must return only jobs carrying that exact tag.
    let harness = TestHarness::new();
    run_with_tags(&harness, &["alpha"]);
    run_with_tags(&harness, &["beta"]);
    let resp = harness.run(&["list", "--all", "--tag", "alpha"]);
    assert_envelope(&resp, "list", true);
    let jobs = resp["jobs"].as_array().unwrap();
    assert!(!jobs.is_empty(), "at least one job expected");
    for job in jobs {
        let has_alpha = job["tags"]
            .as_array()
            .unwrap()
            .iter()
            .any(|t| t.as_str().unwrap() == "alpha");
        assert!(has_alpha, "all returned jobs must have 'alpha'");
    }
}
#[test]
fn list_prefix_tag_filter() {
    // The pattern `ns.*` matches the tag `ns` itself or anything under
    // the `ns.` namespace prefix.
    let harness = TestHarness::new();
    run_with_tags(&harness, &["ns.sub.job"]);
    run_with_tags(&harness, &["other.job"]);
    let resp = harness.run(&["list", "--all", "--tag", "ns.*"]);
    assert_envelope(&resp, "list", true);
    let jobs = resp["jobs"].as_array().unwrap();
    assert!(!jobs.is_empty(), "at least one matching job expected");
    for job in jobs {
        let matches = job["tags"]
            .as_array()
            .unwrap()
            .iter()
            .map(|t| t.as_str().unwrap())
            .any(|t| t == "ns" || t.starts_with("ns."));
        assert!(matches, "all returned jobs must have a 'ns.*' tag");
    }
}
#[test]
fn list_multiple_tag_filters_and_semantics() {
    // Multiple --tag filters combine with AND semantics: only the job
    // carrying both tags may be returned.
    let h = TestHarness::new();
    // One statement per line (the original crammed three onto one,
    // unlike the rest of the file's rustfmt-clean style).
    run_with_tags(&h, &["x", "y"]);
    run_with_tags(&h, &["x"]);
    run_with_tags(&h, &["y"]);
    let v = h.run(&["list", "--all", "--tag", "x", "--tag", "y"]);
    assert_envelope(&v, "list", true);
    let jobs = v["jobs"].as_array().unwrap();
    assert_eq!(jobs.len(), 1, "only job with both tags must be returned");
    let tags: Vec<&str> = jobs[0]["tags"]
        .as_array()
        .unwrap()
        .iter()
        .map(|t| t.as_str().unwrap())
        .collect();
    assert!(tags.contains(&"x") && tags.contains(&"y"));
}
#[test]
fn list_tag_filter_composes_with_cwd() {
    // Without --all, list scopes to the current cwd; adding a --tag filter
    // on top must still produce a valid list envelope.
    let harness = TestHarness::new();
    run_with_tags(&harness, &["shared"]);
    let resp = harness.run(&["list", "--tag", "shared"]);
    assert_envelope(&resp, "list", true);
}
#[test]
fn list_invalid_tag_pattern_rejected() {
    // A tag pattern with illegal characters is a usage error (exit 2).
    let harness = TestHarness::new();
    let args = ["list", "--all", "--tag", "bad pattern!"];
    assert_usage_error(&args, Some(harness.root()));
}
#[test]
fn tag_set_invalid_tag_rejected() {
    // `tag set` validates tag syntax the same way `run --tag` does.
    let harness = TestHarness::new();
    let run_v = run_with_tags(&harness, &[]);
    let job_id = run_v["job_id"].as_str().unwrap();
    let args = ["tag", "set", job_id, "--tag", "bad!tag"];
    assert_usage_error(&args, Some(harness.root()));
}
#[test]
fn notify_set_updates_notify_command_in_meta_json() {
    // `notify set --command` must update notify_command both in the
    // response envelope and in the job's meta.json on disk.
    let h = TestHarness::new();
    let v = h.run(&["run", "--snapshot-after", "0", "--", "echo", "hello"]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    // Give the job a moment to settle before mutating its notification config.
    std::thread::sleep(std::time::Duration::from_millis(200));
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--command",
        "cat >/tmp/event.json",
    ]);
    assert_envelope(&set_v, "notify.set", true);
    assert_eq!(
        set_v["job_id"].as_str().unwrap_or(""),
        job_id,
        "job_id must match"
    );
    assert_eq!(
        set_v["notification"]["notify_command"]
            .as_str()
            .unwrap_or(""),
        "cat >/tmp/event.json",
        "notify_command must be updated"
    );
    // The update must also be persisted to the job's meta.json.
    let meta_path = std::path::Path::new(h.root())
        .join(&job_id)
        .join("meta.json");
    let meta_raw = std::fs::read_to_string(&meta_path).expect("read meta.json");
    let meta: serde_json::Value = serde_json::from_str(&meta_raw).expect("parse meta.json");
    assert_eq!(
        meta["notification"]["notify_command"]
            .as_str()
            .unwrap_or(""),
        "cat >/tmp/event.json",
        "meta.json notify_command must be updated on disk"
    );
}
#[test]
fn notify_set_preserves_notify_file() {
    // Updating --command via `notify set` must not clobber the notify_file
    // configured at `run` time — both in the response and in meta.json.
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let events_file = tmp_dir.path().join("events.ndjson");
    let events_file_str = events_file.to_str().unwrap();
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--notify-file",
        events_file_str,
        "--",
        "echo",
        "hello",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    // Let the job settle before mutating its notification config.
    std::thread::sleep(std::time::Duration::from_millis(200));
    let set_v = h.run(&["notify", "set", &job_id, "--command", "cat >/dev/null"]);
    assert_envelope(&set_v, "notify.set", true);
    assert_eq!(
        set_v["notification"]["notify_command"]
            .as_str()
            .unwrap_or(""),
        "cat >/dev/null",
        "notify_command must be set"
    );
    assert_eq!(
        set_v["notification"]["notify_file"].as_str().unwrap_or(""),
        events_file_str,
        "notify_file must be preserved"
    );
    // Check persistence on disk, not just the in-memory response.
    let meta_path = std::path::Path::new(h.root())
        .join(&job_id)
        .join("meta.json");
    let meta_raw = std::fs::read_to_string(&meta_path).expect("read meta.json");
    let meta: serde_json::Value = serde_json::from_str(&meta_raw).expect("parse meta.json");
    assert_eq!(
        meta["notification"]["notify_file"].as_str().unwrap_or(""),
        events_file_str,
        "notify_file must be preserved in meta.json on disk"
    );
}
#[test]
fn notify_set_missing_job_returns_job_not_found() {
    // `notify set` on an unknown job id yields a job_not_found error.
    let harness = TestHarness::new();
    let resp = harness.run(&["notify", "set", "NONEXISTENT-JOB", "--command", "echo hi"]);
    assert_envelope(&resp, "error", false);
    assert_eq!(
        resp["error"]["code"].as_str().unwrap_or(""),
        "job_not_found",
        "error.code must be job_not_found"
    );
}
#[test]
fn notify_set_terminal_job_succeeds_without_executing_command() {
    // Setting a notify hook on an already-finished job must succeed but
    // never fire the hook retroactively.
    let harness = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let marker = tmp_dir.path().join("executed.txt");
    let marker_str = marker.to_str().unwrap();
    // --wait: the job is already terminal when run returns.
    let run_resp = harness.run(&["run", "--wait", "--", "echo", "done"]);
    assert_envelope(&run_resp, "run", true);
    assert_eq!(run_resp["state"].as_str().unwrap_or(""), "exited");
    let job_id = run_resp["job_id"].as_str().expect("job_id").to_string();
    assert!(!marker.exists(), "marker must not exist before notify set");
    let hook_cmd = format!("touch {marker_str}");
    let set_resp = harness.run(&["notify", "set", &job_id, "--command", &hook_cmd]);
    assert_envelope(&set_resp, "notify.set", true);
    std::thread::sleep(std::time::Duration::from_millis(200));
    assert!(
        !marker.exists(),
        "notify set must not execute the command (marker must not be created)"
    );
}
#[test]
fn notify_set_updated_command_used_at_completion() {
    // A notify_command installed while the job is still running must be the
    // one executed when the job finishes; the event payload is piped to it.
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let captured = tmp_dir.path().join("captured.json");
    let captured_str = captured.to_str().unwrap();
    // `sleep 1` keeps the job alive long enough to swap the hook in.
    let v = h.run(&["run", "--snapshot-after", "0", "--", "sleep", "1"]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    let hook_cmd = format!("cat > {captured_str}");
    let set_v = h.run(&["notify", "set", &job_id, "--command", &hook_cmd]);
    assert_envelope(&set_v, "notify.set", true);
    // Wait past job completion (1 s) plus hook-delivery slack.
    std::thread::sleep(std::time::Duration::from_millis(2500));
    assert!(
        captured.exists(),
        "captured file must be created by the updated notify_command"
    );
    let content = std::fs::read_to_string(&captured).expect("read captured file");
    let event: serde_json::Value =
        serde_json::from_str(content.trim()).expect("captured content must be valid JSON");
    assert_eq!(
        event["event_type"].as_str().unwrap_or(""),
        "job.finished",
        "event_type must be job.finished"
    );
    assert_eq!(
        event["job_id"].as_str().unwrap_or(""),
        job_id,
        "event job_id must match"
    );
}
#[test]
fn global_root_flag_run() {
    // `--root` before the subcommand must redirect job storage there.
    let tmp = tempfile::tempdir().expect("create tempdir");
    let root = tmp.path().to_str().expect("valid UTF-8").to_string();
    let args = ["run", "--snapshot-after", "0", "echo", "global_root_test"];
    let resp = run_cmd_with_global_root_flag(&root, &args);
    assert_envelope(&resp, "run", true);
    let job_id = resp["job_id"].as_str().expect("job_id missing");
    assert!(!job_id.is_empty(), "job_id is empty");
    assert!(
        tmp.path().join(job_id).exists(),
        "job dir not created under global --root path"
    );
}
#[test]
fn global_root_flag_status() {
    // `status` must resolve the job under the root given via global --root.
    let tmp = tempfile::tempdir().expect("create tempdir");
    let root = tmp.path().to_str().expect("valid UTF-8").to_string();
    let run_resp =
        run_cmd_with_global_root_flag(&root, &["run", "--snapshot-after", "0", "echo", "hi"]);
    let job_id = run_resp["job_id"].as_str().unwrap().to_string();
    let status_resp = run_cmd_with_global_root_flag(&root, &["status", &job_id]);
    assert_envelope(&status_resp, "status", true);
    assert_eq!(status_resp["job_id"].as_str().unwrap_or(""), job_id);
}
#[test]
fn global_root_flag_list() {
    // `list --all` must enumerate jobs under the global --root directory.
    let tmp = tempfile::tempdir().expect("create tempdir");
    let root = tmp.path().to_str().expect("valid UTF-8").to_string();
    let run_resp = run_cmd_with_global_root_flag(
        &root,
        &["run", "--snapshot-after", "0", "echo", "list_test"],
    );
    let job_id = run_resp["job_id"].as_str().unwrap().to_string();
    let list_resp = run_cmd_with_global_root_flag(&root, &["list", "--all"]);
    assert_envelope(&list_resp, "list", true);
    let jobs = list_resp["jobs"].as_array().expect("jobs array missing");
    let found = jobs
        .iter()
        .any(|j| j["job_id"].as_str().unwrap_or("") == job_id);
    assert!(found, "started job not found in list response");
}
#[test]
fn global_root_flag_gc() {
    // `gc --dry-run` must work against the root given via global --root.
    let tmp = tempfile::tempdir().expect("create tempdir");
    let root_path = tmp.path().to_str().expect("valid UTF-8").to_string();
    let resp = run_cmd_with_global_root_flag(&root_path, &["gc", "--dry-run"]);
    assert_gc_envelope(&resp, true);
}
#[test]
fn global_root_flag_takes_precedence_over_env() {
    // When both the global --root flag and AGENT_EXEC_ROOT are set, the
    // flag must win: the job lands under the flag's directory only.
    let tmp_flag = tempfile::tempdir().expect("create tempdir for --root");
    let tmp_env = tempfile::tempdir().expect("create tempdir for env");
    let root_flag = tmp_flag.path().to_str().expect("valid UTF-8").to_string();
    let root_env = tmp_env.path().to_str().expect("valid UTF-8").to_string();
    let bin = binary();
    let mut cmd = Command::new(&bin);
    cmd.arg("--root").arg(&root_flag);
    cmd.args(["run", "--snapshot-after", "0", "echo", "precedence"]);
    cmd.env("AGENT_EXEC_ROOT", &root_env);
    let output = cmd.output().expect("run binary");
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    // Include both streams in the failure message so a parse failure is
    // diagnosable (consistent with run_cmd_with_root's error reporting).
    let v: serde_json::Value = serde_json::from_str(stdout.trim()).unwrap_or_else(|e| {
        panic!("stdout is not valid JSON: {e}\nstdout: {stdout}\nstderr: {stderr}")
    });
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id missing");
    assert!(
        tmp_flag.path().join(job_id).exists(),
        "job must be in --root dir, not AGENT_EXEC_ROOT dir"
    );
    assert!(
        !tmp_env.path().join(job_id).exists(),
        "job must NOT be in AGENT_EXEC_ROOT dir when --root flag is set"
    );
}
#[test]
fn subcommand_root_flag_compat_run() {
    // Legacy placement: --root after the subcommand must still be honored.
    let tmp = tempfile::tempdir().expect("create tempdir");
    let root = tmp.path().to_str().expect("valid UTF-8").to_string();
    let resp = run_cmd_with_subcommand_root_flag(
        "run",
        &root,
        &["--snapshot-after", "0", "echo", "compat_run"],
    );
    assert_envelope(&resp, "run", true);
    let job_id = resp["job_id"].as_str().expect("job_id missing");
    assert!(
        tmp.path().join(job_id).exists(),
        "job dir not created under --root path when flag placed after subcommand"
    );
}
#[test]
fn subcommand_root_flag_compat_status() {
    // A job started with the global flag must be visible to the legacy
    // `status --root …` placement.
    let tmp = tempfile::tempdir().expect("create tempdir");
    let root = tmp.path().to_str().expect("valid UTF-8").to_string();
    let run_resp = run_cmd_with_global_root_flag(
        &root,
        &["run", "--snapshot-after", "0", "echo", "compat_status"],
    );
    let job_id = run_resp["job_id"].as_str().unwrap().to_string();
    let status_resp = run_cmd_with_subcommand_root_flag("status", &root, &[&job_id]);
    assert_envelope(&status_resp, "status", true);
    assert_eq!(status_resp["job_id"].as_str().unwrap_or(""), job_id);
}
#[test]
fn subcommand_root_flag_compat_list() {
    // Legacy `list --root …` must see jobs created under the same root.
    let tmp = tempfile::tempdir().expect("create tempdir");
    let root = tmp.path().to_str().expect("valid UTF-8").to_string();
    let run_resp = run_cmd_with_global_root_flag(
        &root,
        &["run", "--snapshot-after", "0", "echo", "compat_list"],
    );
    let job_id = run_resp["job_id"].as_str().unwrap().to_string();
    let list_resp = run_cmd_with_subcommand_root_flag("list", &root, &["--all"]);
    assert_envelope(&list_resp, "list", true);
    let jobs = list_resp["jobs"].as_array().expect("jobs array");
    let found = jobs
        .iter()
        .any(|j| j["job_id"].as_str().unwrap_or("") == job_id);
    assert!(
        found,
        "started job not found when using legacy --root position for list"
    );
}
#[test]
fn subcommand_root_flag_compat_gc() {
    // Legacy `gc --root … --dry-run` must produce a valid gc envelope.
    let tmp = tempfile::tempdir().expect("create tempdir");
    let root_path = tmp.path().to_str().expect("valid UTF-8").to_string();
    let resp = run_cmd_with_subcommand_root_flag("gc", &root_path, &["--dry-run"]);
    assert_gc_envelope(&resp, true);
}
#[test]
fn notify_set_saves_output_match_config() {
    // `notify set --output-pattern/--output-command` must persist the
    // on_output_match config (with defaulted match_type/stream) both in
    // the response envelope and in meta.json.
    let h = TestHarness::new();
    let v = h.run(&["run", "--snapshot-after", "0", "--", "echo", "hello"]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    // Let the job settle before mutating its notification config.
    std::thread::sleep(std::time::Duration::from_millis(200));
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "ERROR",
        "--output-command",
        "cat >/dev/null",
    ]);
    assert_envelope(&set_v, "notify.set", true);
    assert_eq!(
        set_v["notification"]["on_output_match"]["pattern"]
            .as_str()
            .unwrap_or(""),
        "ERROR",
        "on_output_match.pattern must be saved"
    );
    // Unspecified options fall back to their defaults.
    assert_eq!(
        set_v["notification"]["on_output_match"]["match_type"]
            .as_str()
            .unwrap_or(""),
        "contains",
        "on_output_match.match_type defaults to contains"
    );
    assert_eq!(
        set_v["notification"]["on_output_match"]["stream"]
            .as_str()
            .unwrap_or(""),
        "either",
        "on_output_match.stream defaults to either"
    );
    // And the pattern must be persisted to disk as well.
    let meta_path = std::path::Path::new(h.root())
        .join(&job_id)
        .join("meta.json");
    let meta_raw = std::fs::read_to_string(&meta_path).expect("read meta.json");
    let meta: serde_json::Value = serde_json::from_str(&meta_raw).expect("parse meta.json");
    assert_eq!(
        meta["notification"]["on_output_match"]["pattern"]
            .as_str()
            .unwrap_or(""),
        "ERROR",
        "meta.json on_output_match.pattern must be persisted"
    );
}
#[test]
fn notify_set_output_match_terminal_job_no_delivery() {
    // Configuring an output-match hook on an already-finished job must
    // succeed without replaying old output — the hook must never execute.
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let marker = tmp_dir.path().join("executed.txt");
    let marker_str = marker.to_str().unwrap();
    // --wait: the job is terminal (and "done" already printed) by the
    // time run returns.
    let v = h.run(&["run", "--wait", "--", "echo", "done"]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    let hook_cmd = format!("touch {marker_str}");
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "done",
        "--output-command",
        &hook_cmd,
    ]);
    assert_envelope(&set_v, "notify.set", true);
    // Grace period: if delivery were (wrongly) triggered, the marker
    // would appear within this window.
    std::thread::sleep(std::time::Duration::from_millis(300));
    assert!(
        !marker.exists(),
        "notify set on terminal job must not execute output-match command"
    );
}
#[test]
fn notify_set_completion_and_output_match_coexist() {
    // A single `notify set` may configure both a completion hook
    // (--command) and an output-match pattern; neither erases the other.
    let h = TestHarness::new();
    let v = h.run(&["run", "--snapshot-after", "0", "--", "echo", "hello"]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    // Let the job settle before mutating its notification config.
    std::thread::sleep(std::time::Duration::from_millis(200));
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--command",
        "cat >/dev/null",
        "--output-pattern",
        "ERROR",
    ]);
    assert_envelope(&set_v, "notify.set", true);
    assert!(
        set_v["notification"]["notify_command"].as_str().is_some(),
        "notify_command must be present"
    );
    assert!(
        set_v["notification"]["on_output_match"]["pattern"]
            .as_str()
            .is_some(),
        "on_output_match must be present"
    );
}
#[test]
fn notify_set_output_match_missing_job_returns_job_not_found() {
    // Output-match config on an unknown job id yields job_not_found.
    let harness = TestHarness::new();
    let args = [
        "notify",
        "set",
        "NONEXISTENT-JOB",
        "--output-pattern",
        "ERROR",
        "--output-command",
        "cat >/dev/null",
    ];
    let resp = harness.run(&args);
    assert_envelope(&resp, "error", false);
    assert_eq!(
        resp["error"]["code"].as_str().unwrap_or(""),
        "job_not_found",
        "error.code must be job_not_found"
    );
}
#[test]
fn notify_set_output_match_regex_type() {
    // Non-default match_type (regex) and stream (stderr) values must
    // round-trip through `notify set` into the response envelope.
    let h = TestHarness::new();
    let v = h.run(&["run", "--snapshot-after", "0", "--", "echo", "hello"]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    // Let the job settle before mutating its notification config.
    std::thread::sleep(std::time::Duration::from_millis(200));
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "ERR.*",
        "--output-match-type",
        "regex",
        "--output-stream",
        "stderr",
    ]);
    assert_envelope(&set_v, "notify.set", true);
    assert_eq!(
        set_v["notification"]["on_output_match"]["match_type"]
            .as_str()
            .unwrap_or(""),
        "regex",
    );
    assert_eq!(
        set_v["notification"]["on_output_match"]["stream"]
            .as_str()
            .unwrap_or(""),
        "stderr",
    );
}
#[test]
fn output_match_command_sink_fires_on_matching_line() {
    // The output-match command sink must fire when a matching line is
    // produced after the config was installed; the piped event must carry
    // the pattern, stream, and job id.
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let captured = tmp_dir.path().join("match.json");
    let captured_str = captured.to_str().unwrap();
    // The 0.3 s sleep delays the match until after `notify set` below.
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "sleep 0.3; echo ERROR_LINE",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    let hook_cmd = format!("cat > {captured_str}");
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "ERROR_LINE",
        "--output-command",
        &hook_cmd,
    ]);
    assert_envelope(&set_v, "notify.set", true);
    // Allow the job to emit the line and the sink command to run.
    std::thread::sleep(std::time::Duration::from_millis(2000));
    assert!(
        captured.exists(),
        "output-match command sink must have been executed"
    );
    let content = std::fs::read_to_string(&captured).expect("read captured");
    let event: serde_json::Value =
        serde_json::from_str(content.trim()).expect("captured content must be valid JSON");
    assert_eq!(
        event["event_type"].as_str().unwrap_or(""),
        "job.output.matched",
        "event_type must be job.output.matched"
    );
    assert_eq!(
        event["job_id"].as_str().unwrap_or(""),
        job_id,
        "event job_id must match"
    );
    assert_eq!(
        event["pattern"].as_str().unwrap_or(""),
        "ERROR_LINE",
        "event pattern must match configured pattern"
    );
    assert_eq!(
        event["stream"].as_str().unwrap_or(""),
        "stdout",
        "event stream must be stdout"
    );
}
#[test]
fn output_match_file_sink_appends_per_match() {
    // The file sink must append one NDJSON record per matching line.
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let events_file = tmp_dir.path().join("output_events.ndjson");
    let events_file_str = events_file.to_str().unwrap();
    // Two lines match the "MATCH_" substring after a short delay
    // (delay leaves time to install the config below first).
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "sleep 0.2; echo MATCH_ONE; echo MATCH_TWO",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "MATCH_",
        "--output-file",
        events_file_str,
    ]);
    assert_envelope(&set_v, "notify.set", true);
    // Allow both matches to be emitted and flushed to the sink file.
    std::thread::sleep(std::time::Duration::from_millis(2500));
    assert!(
        events_file.exists(),
        "output-match file sink must have been created"
    );
    let content = std::fs::read_to_string(&events_file).expect("read events file");
    let lines: Vec<&str> = content.lines().filter(|l| !l.trim().is_empty()).collect();
    assert_eq!(
        lines.len(),
        2,
        "must have exactly two NDJSON lines (one per match)"
    );
    for line in &lines {
        let ev: serde_json::Value = serde_json::from_str(line).expect("each line must be JSON");
        assert_eq!(
            ev["event_type"].as_str().unwrap_or(""),
            "job.output.matched"
        );
        assert_eq!(ev["job_id"].as_str().unwrap_or(""), job_id);
    }
}
#[test]
fn output_match_no_replay_of_pre_existing_output() {
    // Output emitted BEFORE the output-match config was installed must not
    // be replayed against the new pattern.
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let marker = tmp_dir.path().join("replayed.txt");
    let marker_str = marker.to_str().unwrap();
    // The matching line is printed immediately; the job then idles.
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "echo MATCH_EARLY; sleep 2",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    // Ensure the line has already been written before installing the hook.
    std::thread::sleep(std::time::Duration::from_millis(500));
    let hook_cmd = format!("touch {marker_str}");
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "MATCH_EARLY",
        "--output-command",
        &hook_cmd,
    ]);
    assert_envelope(&set_v, "notify.set", true);
    // Grace period: any (wrong) replay would create the marker here.
    std::thread::sleep(std::time::Duration::from_millis(600));
    assert!(
        !marker.exists(),
        "output-match must not replay pre-existing output"
    );
}
#[test]
fn output_match_sink_failure_does_not_change_job_state() {
    // A failing sink command (`exit 1`) must not affect the job itself:
    // the job still finishes with state "exited".
    let h = TestHarness::new();
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "sleep 0.2; echo TRIGGER",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "TRIGGER",
        "--output-command",
        "exit 1",
    ]);
    assert_envelope(&set_v, "notify.set", true);
    // Wait for the job to finish and the (failing) sink to have run.
    std::thread::sleep(std::time::Duration::from_millis(2000));
    let status_v = h.run(&["status", &job_id]);
    assert_envelope(&status_v, "status", true);
    assert_eq!(
        status_v["state"].as_str().unwrap_or(""),
        "exited",
        "job state must be exited even when output-match sink fails"
    );
}
#[test]
fn output_match_notification_events_ndjson_written() {
    // Every delivered output-match event must also be recorded in the
    // job-local notification_events.ndjson audit log.
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let events_file = tmp_dir.path().join("output_events.ndjson");
    let events_file_str = events_file.to_str().unwrap();
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "sleep 0.2; echo RECORD_ME",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "RECORD_ME",
        "--output-file",
        events_file_str,
    ]);
    assert_envelope(&set_v, "notify.set", true);
    // Allow the match to be delivered and the audit log flushed.
    std::thread::sleep(std::time::Duration::from_millis(2000));
    // The audit log lives inside the job directory under the root.
    let notif_events = std::path::Path::new(h.root())
        .join(&job_id)
        .join("notification_events.ndjson");
    assert!(
        notif_events.exists(),
        "notification_events.ndjson must be created in job dir"
    );
    let content = std::fs::read_to_string(&notif_events).expect("read notification_events.ndjson");
    assert!(
        !content.trim().is_empty(),
        "notification_events.ndjson must contain at least one record"
    );
    let record: serde_json::Value = serde_json::from_str(content.lines().next().unwrap_or("{}"))
        .expect("first line must be JSON");
    assert_eq!(
        record["event_type"].as_str().unwrap_or(""),
        "job.output.matched"
    );
}
#[test]
fn output_match_regex_pattern_fires_on_match() {
    // With match_type=regex, only lines matching the anchored pattern fire.
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let events_file = tmp_dir.path().join("regex_events.ndjson");
    let events_file_str = events_file.to_str().unwrap();
    // ERR123 matches ^ERR; INFO456 must be ignored.
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "sleep 0.2; echo ERR123; echo INFO456",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "^ERR",
        "--output-match-type",
        "regex",
        "--output-file",
        events_file_str,
    ]);
    assert_envelope(&set_v, "notify.set", true);
    // Allow both lines to be emitted and scanned.
    std::thread::sleep(std::time::Duration::from_millis(2500));
    assert!(
        events_file.exists(),
        "regex match must have triggered file sink"
    );
    let content = std::fs::read_to_string(&events_file).expect("read regex events file");
    let lines: Vec<&str> = content.lines().filter(|l| !l.trim().is_empty()).collect();
    assert_eq!(lines.len(), 1, "only ERR123 must match ^ERR regex");
    let ev: serde_json::Value = serde_json::from_str(lines[0]).expect("line must be JSON");
    assert_eq!(
        ev["event_type"].as_str().unwrap_or(""),
        "job.output.matched"
    );
    assert_eq!(ev["line"].as_str().unwrap_or(""), "ERR123");
}
#[test]
fn output_match_stream_stderr_only() {
    // With --output-stream stderr, a matching stdout line must be ignored.
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let events_file = tmp_dir.path().join("stderr_events.ndjson");
    let events_file_str = events_file.to_str().unwrap();
    // Emits MATCH on stdout and again on stderr (>&2).
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "sleep 0.2; echo MATCH; echo MATCH >&2",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "MATCH",
        "--output-stream",
        "stderr",
        "--output-file",
        events_file_str,
    ]);
    assert_envelope(&set_v, "notify.set", true);
    // Allow both lines to be emitted and scanned.
    std::thread::sleep(std::time::Duration::from_millis(2500));
    assert!(
        events_file.exists(),
        "stderr match must have triggered file sink"
    );
    let content = std::fs::read_to_string(&events_file).expect("read stderr events file");
    let lines: Vec<&str> = content.lines().filter(|l| !l.trim().is_empty()).collect();
    assert_eq!(
        lines.len(),
        1,
        "only the stderr MATCH must be recorded (stdout MATCH must be ignored)"
    );
    let ev: serde_json::Value = serde_json::from_str(lines[0]).expect("line must be JSON");
    assert_eq!(ev["stream"].as_str().unwrap_or(""), "stderr");
}
// Regression: a `notify set` issued while the job is already running must
// become visible to the output watcher fast enough to catch a matching line
// that arrives very shortly (<100 ms) after the config update lands.
#[test]
fn output_match_near_future_line_triggers_delivery() {
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let events_file = tmp_dir.path().join("near_future_events.ndjson");
    let events_file_str = events_file.to_str().unwrap();
    // Heartbeats for ~1.6 s, then a short pause, then the one matching line.
    let v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "for i in $(seq 1 8); do echo heartbeat_$i; sleep 0.2; done; sleep 0.5; echo CLOSE_CALL_MATCH",
    ]);
    assert_envelope(&v, "run", true);
    let job_id = v["job_id"].as_str().expect("job_id").to_string();
    // Attach the pattern mid-run, shortly before CLOSE_CALL_MATCH is printed.
    std::thread::sleep(std::time::Duration::from_millis(1800));
    let set_v = h.run(&[
        "notify",
        "set",
        &job_id,
        "--output-pattern",
        "CLOSE_CALL_MATCH",
        "--output-file",
        events_file_str,
    ]);
    assert_envelope(&set_v, "notify.set", true);
    std::thread::sleep(std::time::Duration::from_millis(3000));
    assert!(
        events_file.exists(),
        "output-match file sink must have been written: per-line reload must make \
         the notify set update visible even when the matching line arrives <100 ms after it"
    );
    let content = std::fs::read_to_string(&events_file).expect("read near_future_events");
    let lines: Vec<&str> = content.lines().filter(|l| !l.trim().is_empty()).collect();
    assert_eq!(
        lines.len(),
        1,
        "exactly one match must be recorded for CLOSE_CALL_MATCH"
    );
    let ev: serde_json::Value = serde_json::from_str(lines[0]).expect("line must be JSON");
    assert_eq!(
        ev["event_type"].as_str().unwrap_or(""),
        "job.output.matched",
        "event_type must be job.output.matched"
    );
    assert_eq!(
        ev["line"].as_str().unwrap_or(""),
        "CLOSE_CALL_MATCH",
        "line field must contain the matched output line"
    );
}
// `create` and `run` must persist --tag values into meta.json with an
// identical shape (ordered JSON array of strings).
#[test]
fn create_tag_persisted_same_shape_as_run() {
    let h = TestHarness::new();
    let c = h.run(&["create", "--tag", "aaa", "--tag", "bbb", "--", "true"]);
    assert_envelope(&c, "create", true);
    let create_job_id = c["job_id"].as_str().expect("job_id");
    // Read the tags straight from the created job's on-disk meta.json.
    let create_meta_path = std::path::Path::new(h.root())
        .join(create_job_id)
        .join("meta.json");
    let create_meta: serde_json::Value =
        serde_json::from_slice(&std::fs::read(&create_meta_path).unwrap()).unwrap();
    let create_tags: Vec<&str> = create_meta["tags"]
        .as_array()
        .expect("tags in create meta.json")
        .iter()
        .map(|t| t.as_str().unwrap())
        .collect();
    // Same tags via `run`, then compare the two persisted representations.
    let r = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--tag",
        "aaa",
        "--tag",
        "bbb",
        "--",
        "true",
    ]);
    assert_envelope(&r, "run", true);
    let run_job_id = r["job_id"].as_str().expect("job_id");
    let run_meta_path = std::path::Path::new(h.root())
        .join(run_job_id)
        .join("meta.json");
    let run_meta: serde_json::Value =
        serde_json::from_slice(&std::fs::read(&run_meta_path).unwrap()).unwrap();
    let run_tags: Vec<&str> = run_meta["tags"]
        .as_array()
        .expect("tags in run meta.json")
        .iter()
        .map(|t| t.as_str().unwrap())
        .collect();
    assert_eq!(
        create_tags, run_tags,
        "create and run must persist the same tag shape"
    );
    assert_eq!(create_tags, vec!["aaa", "bbb"]);
}
// `create` must drop duplicate --tag values, keeping first-seen order in
// the persisted meta.json.
#[test]
fn create_tag_deduplication() {
    let harness = TestHarness::new();
    let resp = harness.run(&[
        "create", "--tag", "aaa", "--tag", "bbb", "--tag", "aaa", "--", "true",
    ]);
    assert_envelope(&resp, "create", true);
    let id = resp["job_id"].as_str().unwrap();
    let meta_file = std::path::Path::new(harness.root())
        .join(id)
        .join("meta.json");
    let meta_bytes = std::fs::read(&meta_file).unwrap();
    let meta: serde_json::Value = serde_json::from_slice(&meta_bytes).unwrap();
    let tag_values = meta["tags"].as_array().expect("tags");
    let tags: Vec<&str> = tag_values.iter().map(|t| t.as_str().unwrap()).collect();
    assert_eq!(tags, vec!["aaa", "bbb"], "duplicates must be removed");
}
// A `create` with no --tag flags must still persist a `tags` key holding an
// empty array (not null / absent) in meta.json.
#[test]
fn create_no_tags_persists_empty_array() {
    let harness = TestHarness::new();
    let resp = harness.run(&["create", "--", "true"]);
    assert_envelope(&resp, "create", true);
    let id = resp["job_id"].as_str().unwrap();
    let meta_file = std::path::Path::new(harness.root())
        .join(id)
        .join("meta.json");
    let meta: serde_json::Value =
        serde_json::from_slice(&std::fs::read(&meta_file).unwrap()).unwrap();
    let tags = meta["tags"].as_array().expect("tags must be present");
    assert!(tags.is_empty(), "tags must be empty when none specified");
}
// --notify-command must be persisted under notification.notify_command in
// meta.json with the same shape by both `create` and `run`.
#[test]
fn create_notify_command_persisted_same_shape_as_run() {
    let h = TestHarness::new();
    // Harmless command: consumes stdin, produces no output, exits 0.
    let notify_cmd = "cat >/dev/null";
    let c = h.run(&["create", "--notify-command", notify_cmd, "--", "true"]);
    assert_envelope(&c, "create", true);
    let create_job_id = c["job_id"].as_str().unwrap();
    let create_meta: serde_json::Value = serde_json::from_slice(
        &std::fs::read(
            std::path::Path::new(h.root())
                .join(create_job_id)
                .join("meta.json"),
        )
        .unwrap(),
    )
    .unwrap();
    let r = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--notify-command",
        notify_cmd,
        "--",
        "true",
    ]);
    assert_envelope(&r, "run", true);
    let run_job_id = r["job_id"].as_str().unwrap();
    let run_meta: serde_json::Value = serde_json::from_slice(
        &std::fs::read(
            std::path::Path::new(h.root())
                .join(run_job_id)
                .join("meta.json"),
        )
        .unwrap(),
    )
    .unwrap();
    // Compare the serialized JSON values directly, not just string equality.
    assert_eq!(
        create_meta["notification"]["notify_command"], run_meta["notification"]["notify_command"],
        "notify_command must be persisted with the same shape by create and run"
    );
}
// The output-match trio (--output-pattern / --output-command / --output-file)
// must be persisted under notification.on_output_match in meta.json with the
// same shape by both `create` and `run`.
#[test]
fn create_output_pattern_persisted_same_shape_as_run() {
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let events_file = tmp_dir.path().join("events.ndjson");
    let events_path = events_file.to_str().unwrap();
    let c = h.run(&[
        "create",
        "--output-pattern",
        "ERROR",
        "--output-command",
        "cat >/dev/null",
        "--output-file",
        events_path,
        "--",
        "sh",
        "-c",
        "echo ERROR",
    ]);
    assert_envelope(&c, "create", true);
    let create_job_id = c["job_id"].as_str().unwrap();
    let create_meta: serde_json::Value = serde_json::from_slice(
        &std::fs::read(
            std::path::Path::new(h.root())
                .join(create_job_id)
                .join("meta.json"),
        )
        .unwrap(),
    )
    .unwrap();
    // Identical flags via `run`, then field-by-field comparison below.
    let r = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--output-pattern",
        "ERROR",
        "--output-command",
        "cat >/dev/null",
        "--output-file",
        events_path,
        "--",
        "sh",
        "-c",
        "echo ERROR",
    ]);
    assert_envelope(&r, "run", true);
    let run_job_id = r["job_id"].as_str().unwrap();
    let run_meta: serde_json::Value = serde_json::from_slice(
        &std::fs::read(
            std::path::Path::new(h.root())
                .join(run_job_id)
                .join("meta.json"),
        )
        .unwrap(),
    )
    .unwrap();
    let create_match = &create_meta["notification"]["on_output_match"];
    let run_match = &run_meta["notification"]["on_output_match"];
    assert_eq!(
        create_match["pattern"], run_match["pattern"],
        "on_output_match.pattern must match between create and run"
    );
    assert_eq!(
        create_match["command"], run_match["command"],
        "on_output_match.command must match between create and run"
    );
    assert_eq!(
        create_match["file"], run_match["file"],
        "on_output_match.file must match between create and run"
    );
}
// `create` only records the job: neither the workload command nor any
// notification sink (notify-command / output file) may execute at create
// time.
#[test]
fn create_does_not_trigger_notification_side_effects() {
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let events_file = tmp_dir.path().join("create_side_effects.ndjson");
    let events_path = events_file.to_str().unwrap();
    // Both the notify-command (shell append) and the output-match file sink
    // target the same path, so any premature side effect creates the file.
    let c = h.run(&[
        "create",
        "--notify-command",
        &format!("echo triggered >> {}", events_path),
        "--output-pattern",
        "ERROR",
        "--output-file",
        events_path,
        "--",
        "sh",
        "-c",
        "echo ERROR",
    ]);
    assert_envelope(&c, "create", true);
    // Short grace period in case anything fires asynchronously.
    std::thread::sleep(std::time::Duration::from_millis(200));
    assert!(
        !events_file.exists(),
        "create must not execute notification sinks or the command"
    );
}
// `start` must surface the tags persisted by `create`, in order.
#[test]
fn start_uses_tags_persisted_by_create() {
    let harness = TestHarness::new();
    let created = harness.run(&["create", "--tag", "mytag", "--tag", "other", "--", "true"]);
    assert_envelope(&created, "create", true);
    let id = created["job_id"].as_str().unwrap().to_string();
    let started = harness.run(&["start", "--snapshot-after", "0", &id]);
    assert_envelope(&started, "start", true);
    let tag_array = started["tags"].as_array().expect("tags in start response");
    let tags: Vec<&str> = tag_array.iter().map(|t| t.as_str().unwrap()).collect();
    assert_eq!(
        tags,
        vec!["mytag", "other"],
        "start must return the tags persisted by create"
    );
}
// `start` must load and honor the output-match notification configuration
// (pattern + file sink) that `create` persisted.
#[test]
fn start_uses_output_match_notification_persisted_by_create() {
    let h = TestHarness::new();
    let tmp_dir = tempfile::tempdir().expect("tempdir");
    let events_file = tmp_dir.path().join("start_output_match.ndjson");
    let events_path = events_file.to_str().unwrap();
    let c = h.run(&[
        "create",
        "--output-pattern",
        "MATCH_ME",
        "--output-file",
        events_path,
        "--",
        "sh",
        "-c",
        "echo MATCH_ME",
    ]);
    assert_envelope(&c, "create", true);
    let job_id = c["job_id"].as_str().unwrap().to_string();
    let s = h.run(&["start", "--snapshot-after", "0", &job_id]);
    assert_envelope(&s, "start", true);
    // Wait for the short job to finish and the watcher to flush the sink.
    std::thread::sleep(std::time::Duration::from_millis(2000));
    assert!(
        events_file.exists(),
        "output-match event file must be written when start uses persisted create config"
    );
    let content = std::fs::read_to_string(&events_file).unwrap();
    let lines: Vec<&str> = content.lines().filter(|l| !l.trim().is_empty()).collect();
    assert_eq!(lines.len(), 1, "exactly one match event must be written");
    let ev: serde_json::Value = serde_json::from_str(lines[0]).unwrap();
    assert_eq!(
        ev["event_type"].as_str().unwrap_or(""),
        "job.output.matched"
    );
    assert_eq!(ev["line"].as_str().unwrap_or(""), "MATCH_ME");
}
// Regression: a backgrounded child (`sleep 30 &`) inherits the stdio pipe
// ends, so the pipes stay open after the root `sh` exits. Job state must
// still go terminal promptly on root exit instead of lingering as `running`
// until the pipes close.
#[test]
#[cfg(unix)]
fn status_becomes_terminal_when_root_exits_despite_inherited_stdio() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "sleep 30 &",
    ]);
    assert_envelope(&run_v, "run", true);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    // Poll status for up to 10 s, stopping at the first non-running,
    // non-created state.
    let deadline = std::time::Instant::now() + std::time::Duration::from_secs(10);
    let poll = std::time::Duration::from_millis(50);
    let mut observed_state = String::new();
    while std::time::Instant::now() < deadline {
        let v = h.run(&["status", &job_id]);
        let state = v["state"].as_str().unwrap_or("").to_string();
        if state != "running" && state != "created" {
            observed_state = state;
            break;
        }
        std::thread::sleep(poll);
    }
    // observed_state is empty if the deadline expired without leaving running.
    assert!(
        !observed_state.is_empty() && observed_state != "running",
        "job must reach a terminal state promptly after wrapped root exits; \
         stuck in state={observed_state:?} (regression: lingering running state)"
    );
}
// After the job reaches a terminal state, the internal `_supervise` process
// for it must exit too — it must not stay alive just because the background
// `sleep 30` still holds the inherited pipe ends.
#[test]
#[cfg(unix)]
fn supervise_exits_promptly_after_root_exits_despite_inherited_stdio() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "sleep 30 &",
    ]);
    assert_envelope(&run_v, "run", true);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    // Phase 1: wait (up to 10 s) for the job itself to go terminal.
    let status_deadline = std::time::Instant::now() + std::time::Duration::from_secs(10);
    let poll = std::time::Duration::from_millis(50);
    let mut reached_terminal = false;
    while std::time::Instant::now() < status_deadline {
        let v = h.run(&["status", &job_id]);
        let state = v["state"].as_str().unwrap_or("");
        if state != "running" && state != "created" {
            reached_terminal = true;
            break;
        }
        std::thread::sleep(poll);
    }
    assert!(
        reached_terminal,
        "prerequisite: job must reach terminal state before checking supervisor linger"
    );
    // Phase 2: poll pgrep (up to 5 s) for a surviving supervisor process
    // whose command line mentions both `_supervise` and this job id.
    let linger_deadline = std::time::Instant::now() + std::time::Duration::from_secs(5);
    let mut supervisor_lingering = true;
    let pgrep_pattern = format!("_supervise.*{job_id}");
    while std::time::Instant::now() < linger_deadline {
        std::thread::sleep(std::time::Duration::from_millis(100));
        let result = std::process::Command::new("pgrep")
            .arg("-f")
            .arg(&pgrep_pattern)
            .output();
        match result {
            Ok(output) => {
                // Whitespace-only stdout means pgrep found no match.
                if output.stdout.iter().all(|b| b.is_ascii_whitespace()) {
                    supervisor_lingering = false;
                    break;
                }
            }
            Err(_) => {
                // pgrep unavailable: best-effort check, treat as no linger.
                supervisor_lingering = false;
                break;
            }
        }
    }
    assert!(
        !supervisor_lingering,
        "_supervise must not linger after job reaches terminal state \
         (job_id={job_id}; background sleep 30 holds inherited pipe ends)"
    );
}
// Argv-mode jobs (`-- sh -c ...`) must complete after the exec-handoff
// change: exit 0, reach `exited`, and capture stdout in the log.
#[test]
#[cfg(unix)]
fn argv_mode_exec_handoff_completes() {
    let harness = TestHarness::new();
    let resp = harness.run(&["run", "--wait", "--", "sh", "-c", "echo argv-ok"]);
    assert_envelope(&resp, "run", true);
    assert_eq!(resp["exit_code"], 0, "argv-mode job must exit 0");
    let id = resp["job_id"].as_str().unwrap();
    let status = harness.run(&["status", id]);
    let state = status["state"].as_str().unwrap_or("");
    assert_eq!(state, "exited", "argv-mode job must reach exited state");
    let tail = harness.run(&["tail", id, "--tail-lines", "5"]);
    let stdout_tail = tail["stdout_tail"].as_str().unwrap_or("");
    assert!(
        stdout_tail.contains("argv-ok"),
        "stdout must contain 'argv-ok'; got: {stdout_tail:?}"
    );
}
// A single string after `--` must still go through a shell, so `&&` chains
// keep working after the argv-mode change.
#[test]
#[cfg(unix)]
fn shell_string_mode_preserved_after_argv_change() {
    let harness = TestHarness::new();
    let resp = harness.run(&["run", "--wait", "--", "echo string-ok && echo string-two"]);
    assert_envelope(&resp, "run", true);
    assert_eq!(resp["exit_code"], 0, "shell-string mode job must exit 0");
    let id = resp["job_id"].as_str().unwrap();
    let tail = harness.run(&["tail", id, "--tail-lines", "5"]);
    let stdout_tail = tail["stdout_tail"].as_str().unwrap_or("");
    let has_first = stdout_tail.contains("string-ok");
    assert!(
        has_first,
        "stdout must contain 'string-ok'; got: {stdout_tail:?}"
    );
    let has_second = stdout_tail.contains("string-two");
    assert!(
        has_second,
        "stdout must contain 'string-two' (shell && operator); got: {stdout_tail:?}"
    );
}
// Issue #5 regression: in argv mode, job completion must track the root
// workload process, not the inherited stdio pipes held open by the
// backgrounded `sleep 30`.
#[test]
#[cfg(unix)]
fn argv_mode_completion_aligns_with_workload_boundary_issue5_regression() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        "sleep 30 &",
    ]);
    assert_envelope(&run_v, "run", true);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    // Poll status for up to 10 s until the job leaves running/created.
    let deadline = std::time::Instant::now() + std::time::Duration::from_secs(10);
    let poll = std::time::Duration::from_millis(50);
    let mut observed_state = String::new();
    while std::time::Instant::now() < deadline {
        let v = h.run(&["status", &job_id]);
        let state = v["state"].as_str().unwrap_or("").to_string();
        if state != "running" && state != "created" {
            observed_state = state;
            break;
        }
        std::thread::sleep(poll);
    }
    // observed_state stays empty if the deadline expired while still running.
    assert!(
        !observed_state.is_empty() && observed_state != "running",
        "argv-mode job must reach terminal state promptly after workload exits \
         (issue #5 regression); stuck in state={observed_state:?}"
    );
}
// Post-0.1.10 issue #5 shape: success-looking log output must NOT cause the
// job to be reported as finished — status stays `running` while the root
// process (here a trailing `sleep 30`) is still alive.
#[test]
#[cfg(unix)]
fn status_remains_running_while_root_alive_despite_success_output_post_0_1_10_issue5() {
    let h = TestHarness::new();
    let run_v = h.run(&[
        "run",
        "--snapshot-after",
        "0",
        "--",
        "sh",
        "-c",
        concat!(
            "echo 'No changes found for parallel execution'; ",
            "echo 'Orchestrator completed successfully'; ",
            "sleep 30"
        ),
    ]);
    assert_envelope(&run_v, "run", true);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    // Give the job time to emit its two lines before sampling the log.
    std::thread::sleep(std::time::Duration::from_millis(500));
    let tail_v = h.run(&["tail", &job_id, "--tail-lines", "20"]);
    let stdout_tail = tail_v["stdout_tail"].as_str().unwrap_or("");
    assert!(
        stdout_tail.contains("Orchestrator completed successfully"),
        "success-like output must be visible in stdout log before process exits; \
         got: {stdout_tail:?}"
    );
    let status_v = h.run(&["status", &job_id]);
    let state = status_v["state"].as_str().unwrap_or("");
    assert_eq!(
        state, "running",
        "status must remain `running` while the root workload process is alive, \
         even when success-like output is already present in the log \
         (post-0.1.10 issue #5 shape; fix must be in the upstream workload)"
    );
    // Cleanup: don't leave the 30 s sleeper running after the test.
    h.run(&["kill", "--signal", "KILL", &job_id]);
}
// Non-Unix counterpart of the argv-mode test: a cmd.exe invocation must
// complete, report `exited`, and capture stdout in the log.
#[test]
#[cfg(not(unix))]
fn argv_mode_non_unix_shell_string_fallback_completes() {
    let harness = TestHarness::new();
    let resp = harness.run(&["run", "--wait", "--", "cmd", "/C", "echo argv-win-ok"]);
    assert_envelope(&resp, "run", true);
    assert_eq!(resp["exit_code"], 0, "argv-mode non-Unix job must exit 0");
    let id = resp["job_id"].as_str().unwrap();
    let status = harness.run(&["status", id]);
    let state = status["state"].as_str().unwrap_or("");
    assert_eq!(state, "exited", "argv-mode non-Unix job must reach exited state");
    let tail = harness.run(&["tail", id, "--tail-lines", "5"]);
    let stdout_tail = tail["stdout_tail"].as_str().unwrap_or("");
    assert!(
        stdout_tail.contains("argv-win-ok"),
        "stdout must contain 'argv-win-ok' on non-Unix argv fallback; got: {stdout_tail:?}"
    );
}
/// Invoke the binary with the global `--yaml` flag prepended to `args`,
/// using `root` as AGENT_EXEC_ROOT, and return the raw stdout text.
fn run_yaml_raw(args: &[&str], root: &str) -> String {
    let output = Command::new(binary())
        .arg("--yaml")
        .args(args)
        .env("AGENT_EXEC_ROOT", root)
        .output()
        .expect("run binary");
    String::from_utf8_lossy(&output.stdout).into_owned()
}
/// Invoke the binary once with `--yaml`, assert stdout is non-empty YAML,
/// and return it converted to a `serde_json::Value` for uniform assertions.
///
/// Fix: the previous version executed the binary TWICE — once (via
/// `run_yaml_raw`) for stdout and a second time just to capture stderr.
/// The second invocation re-executed the subcommand (for `run`/`create`
/// this created a duplicate job as a side effect), and the stderr shown in
/// failure messages came from a different process run. A single invocation
/// captures both streams consistently.
fn run_yaml(args: &[&str], root: &str) -> serde_json::Value {
    let bin = binary();
    let mut cmd = Command::new(&bin);
    cmd.arg("--yaml");
    cmd.args(args);
    cmd.env("AGENT_EXEC_ROOT", root);
    let output = cmd.output().expect("run binary");
    let raw = String::from_utf8_lossy(&output.stdout).into_owned();
    let stderr = String::from_utf8_lossy(&output.stderr).into_owned();
    assert!(!raw.trim().is_empty(), "stdout is empty (stderr: {stderr})");
    let yaml_val: serde_yaml::Value = serde_yaml::from_str(&raw).unwrap_or_else(|e| {
        panic!("stdout is not valid YAML: {e}\nstdout: {raw}\nstderr: {stderr}")
    });
    serde_json::to_value(&yaml_val).expect("yaml->json conversion")
}
// The global --yaml flag must make `run` emit a YAML mapping on stdout.
#[test]
fn yaml_flag_run_returns_yaml() {
    let harness = TestHarness::new();
    let args = ["run", "--snapshot-after", "0", "echo", "yaml_test"];
    let raw = run_yaml_raw(&args, harness.root());
    assert!(!raw.trim().is_empty(), "stdout empty");
    let parsed = match serde_yaml::from_str::<serde_yaml::Value>(&raw) {
        Ok(value) => value,
        Err(e) => panic!("not valid YAML: {e}\nstdout: {raw}"),
    };
    assert!(parsed.is_mapping(), "expected YAML mapping");
}
// With --yaml, the run envelope must still carry the standard fields.
#[test]
fn yaml_flag_run_envelope_fields() {
    let harness = TestHarness::new();
    let v = run_yaml(&["run", "--snapshot-after", "0", "echo", "hi"], harness.root());
    assert_envelope(&v, "run", true);
    let has_job_id = v["job_id"].as_str().is_some();
    assert!(has_job_id, "job_id missing: {v}");
}
// `status` must honor --yaml and echo back the job id from `run`.
#[test]
fn yaml_flag_status_success() {
    let harness = TestHarness::new();
    let started = run_yaml(&["run", "--snapshot-after", "0", "echo", "hi"], harness.root());
    let job_id = started["job_id"].as_str().unwrap().to_string();
    let status = run_yaml(&["status", &job_id], harness.root());
    assert_envelope(&status, "status", true);
    assert_eq!(status["job_id"].as_str().unwrap_or(""), job_id);
}
// Error envelopes must also be rendered as YAML under --yaml.
#[test]
fn yaml_flag_error_response() {
    let harness = TestHarness::new();
    let err_v = run_yaml(&["status", "NONEXISTENT_JOB_ID_YAML"], harness.root());
    assert_envelope(&err_v, "error", false);
    let code = err_v["error"]["code"].as_str().unwrap_or("");
    assert_eq!(code, "job_not_found");
}
// `schema` must work under --yaml and carry a schema payload (object or
// string form are both accepted).
#[test]
fn yaml_flag_schema_returns_yaml() {
    let harness = TestHarness::new();
    let v = run_yaml(&["schema"], harness.root());
    assert_envelope(&v, "schema", true);
    let schema_present = v["schema"].is_object() || v["schema"].is_string();
    assert!(schema_present, "schema field missing or wrong type: {v}");
}
// Without --yaml the default output format must remain JSON.
#[test]
fn json_default_still_works_without_yaml_flag() {
    let harness = TestHarness::new();
    let resp = harness.run(&["run", "--snapshot-after", "0", "echo", "json_default"]);
    assert_envelope(&resp, "run", true);
}
// --yaml must also be accepted when placed after the subcommand name.
#[test]
fn yaml_flag_after_subcommand_works() {
    let harness = TestHarness::new();
    let output = Command::new(binary())
        .args([
            "run",
            "--yaml",
            "--snapshot-after",
            "0",
            "echo",
            "global_test",
        ])
        .env("AGENT_EXEC_ROOT", harness.root())
        .output()
        .expect("run binary");
    let raw = String::from_utf8_lossy(&output.stdout);
    let parsed: serde_yaml::Value =
        serde_yaml::from_str(&raw).unwrap_or_else(|e| panic!("not valid YAML: {e}\nstdout: {raw}"));
    assert!(parsed.is_mapping(), "expected YAML mapping");
}
// Deleting a finished job must report deleted=1/skipped=0 with a per-job
// `deleted` action entry, after which `status` returns job_not_found.
#[test]
fn delete_single_removes_finished_job() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "echo", "delete_me"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    // Ensure the job is terminal before deleting (delete rejects running jobs).
    h.run(&["wait", &job_id]);
    let v = h.run(&["delete", &job_id]);
    assert_envelope(&v, "delete", true);
    assert_eq!(
        v["deleted"].as_u64().unwrap_or(0),
        1,
        "expected deleted=1: {v}"
    );
    assert_eq!(
        v["skipped"].as_u64().unwrap_or(1),
        0,
        "expected skipped=0: {v}"
    );
    assert!(v["jobs"].is_array(), "jobs field missing: {v}");
    let jobs = v["jobs"].as_array().unwrap();
    assert_eq!(jobs.len(), 1);
    assert_eq!(jobs[0]["job_id"].as_str().unwrap_or(""), job_id);
    assert_eq!(jobs[0]["action"].as_str().unwrap_or(""), "deleted");
    // The job must be gone afterwards.
    let status_v = h.run(&["status", &job_id]);
    assert!(
        !status_v["ok"].as_bool().unwrap_or(true),
        "expected ok=false after delete: {status_v}"
    );
    assert_eq!(
        status_v["error"]["code"].as_str().unwrap_or(""),
        "job_not_found"
    );
}
// Deleting a running job must be refused with an invalid_state error, and
// the job directory must remain intact.
#[test]
fn delete_running_job_returns_invalid_state() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "sleep", "30"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    assert_eq!(run_v["state"].as_str().unwrap_or(""), "running");
    let v = h.run(&["delete", &job_id]);
    assert!(
        !v["ok"].as_bool().unwrap_or(true),
        "expected ok=false for running job: {v}"
    );
    assert_eq!(v["error"]["code"].as_str().unwrap_or(""), "invalid_state");
    // status still succeeds, proving the directory survived the attempt.
    let status_v = h.run(&["status", &job_id]);
    assert!(
        status_v["ok"].as_bool().unwrap_or(false),
        "job directory must still exist: {status_v}"
    );
    // Cleanup: stop the 30 s sleeper.
    h.run(&["kill", &job_id]);
}
// `delete` on an unknown id must return a job_not_found error envelope.
#[test]
fn delete_nonexistent_job_returns_job_not_found() {
    let harness = TestHarness::new();
    let v = harness.run(&["delete", "NONEXISTENT_JOB_ID_XYZ"]);
    let ok = v["ok"].as_bool().unwrap_or(true);
    assert!(!ok, "expected ok=false: {v}");
    assert_eq!(v["error"]["code"].as_str().unwrap_or(""), "job_not_found");
}
// `delete --dry-run` must report would_delete without counting anything as
// deleted, and leave the job directory untouched.
#[test]
fn delete_dry_run_single_preserves_directory() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "echo", "dry_run_single"]);
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    h.run(&["wait", &job_id]);
    let v = h.run(&["delete", "--dry-run", &job_id]);
    assert_envelope(&v, "delete", true);
    assert!(v["dry_run"].as_bool().unwrap_or(false));
    assert_eq!(
        v["deleted"].as_u64().unwrap_or(1),
        0,
        "dry-run must not count deleted: {v}"
    );
    let jobs = v["jobs"].as_array().unwrap();
    assert_eq!(jobs[0]["action"].as_str().unwrap_or(""), "would_delete");
    // The job must still be queryable afterwards.
    let status_v = h.run(&["status", &job_id]);
    assert!(
        status_v["ok"].as_bool().unwrap_or(false),
        "job must still exist after dry-run: {status_v}"
    );
}
// `delete --all` must only touch jobs launched from the current working
// directory: a job started from another cwd (same root) must survive.
// (run_cmd_with_root_and_cwd is a file-local helper defined elsewhere.)
#[test]
fn delete_all_scopes_to_current_cwd() {
    let h = TestHarness::new();
    let dir_a = tempfile::tempdir().unwrap();
    let dir_b = tempfile::tempdir().unwrap();
    let (va, _) = run_cmd_with_root_and_cwd(
        &["run", "--snapshot-after", "0", "echo", "job_a"],
        Some(h.root()),
        Some(dir_a.path()),
    );
    let job_a = va["job_id"].as_str().unwrap().to_string();
    let (vb, _) = run_cmd_with_root_and_cwd(
        &["run", "--snapshot-after", "0", "echo", "job_b"],
        Some(h.root()),
        Some(dir_b.path()),
    );
    let job_b = vb["job_id"].as_str().unwrap().to_string();
    // Both jobs must be terminal so --all would otherwise delete either.
    h.run(&["wait", &job_a]);
    h.run(&["wait", &job_b]);
    // delete --all issued from dir_a must only see job_a.
    let (del_v, _) =
        run_cmd_with_root_and_cwd(&["delete", "--all"], Some(h.root()), Some(dir_a.path()));
    assert_envelope(&del_v, "delete", true);
    let status_a = h.run(&["status", &job_a]);
    assert_eq!(
        status_a["error"]["code"].as_str().unwrap_or(""),
        "job_not_found",
        "job A must be deleted: {status_a}"
    );
    let status_b = h.run(&["status", &job_b]);
    assert!(
        status_b["ok"].as_bool().unwrap_or(false),
        "job B must survive: {status_b}"
    );
}
// `delete --all` must skip non-terminal jobs: a running job is left in
// place and reported with a `skipped` action entry.
#[test]
fn delete_all_skips_running_and_created_jobs() {
    let h = TestHarness::new();
    let dir = tempfile::tempdir().unwrap();
    let (run_v, _) = run_cmd_with_root_and_cwd(
        &["run", "--snapshot-after", "0", "sleep", "30"],
        Some(h.root()),
        Some(dir.path()),
    );
    let running_job_id = run_v["job_id"].as_str().unwrap().to_string();
    assert_eq!(run_v["state"].as_str().unwrap_or(""), "running");
    // --all from the same cwd sees the running job but must not delete it.
    let (del_v, _) =
        run_cmd_with_root_and_cwd(&["delete", "--all"], Some(h.root()), Some(dir.path()));
    assert_envelope(&del_v, "delete", true);
    assert_eq!(
        del_v["deleted"].as_u64().unwrap_or(1),
        0,
        "should delete nothing: {del_v}"
    );
    let status_v = h.run(&["status", &running_job_id]);
    assert!(
        status_v["ok"].as_bool().unwrap_or(false),
        "running job must survive: {status_v}"
    );
    let jobs = del_v["jobs"].as_array().unwrap();
    let skipped = jobs
        .iter()
        .filter(|j| j["action"].as_str().unwrap_or("") == "skipped")
        .count();
    assert!(skipped >= 1, "expected at least one skipped entry: {del_v}");
    // Cleanup: stop the 30 s sleeper.
    h.run(&["kill", &running_job_id]);
}
// `delete --dry-run --all` must enumerate would_delete candidates without
// removing anything.
#[test]
fn delete_all_dry_run_preserves_directories() {
    let h = TestHarness::new();
    let dir = tempfile::tempdir().unwrap();
    let (run_v, _) = run_cmd_with_root_and_cwd(
        &["run", "--snapshot-after", "0", "echo", "dry_all"],
        Some(h.root()),
        Some(dir.path()),
    );
    let job_id = run_v["job_id"].as_str().unwrap().to_string();
    h.run(&["wait", &job_id]);
    let (del_v, _) = run_cmd_with_root_and_cwd(
        &["delete", "--dry-run", "--all"],
        Some(h.root()),
        Some(dir.path()),
    );
    assert_envelope(&del_v, "delete", true);
    assert!(del_v["dry_run"].as_bool().unwrap_or(false));
    assert_eq!(
        del_v["deleted"].as_u64().unwrap_or(1),
        0,
        "dry-run must not delete: {del_v}"
    );
    let jobs = del_v["jobs"].as_array().unwrap();
    let would_delete: Vec<_> = jobs
        .iter()
        .filter(|j| j["action"].as_str().unwrap_or("") == "would_delete")
        .collect();
    assert!(
        !would_delete.is_empty(),
        "expected at least one would_delete entry: {del_v}"
    );
    // The finished job must still be queryable afterwards.
    let status_v = h.run(&["status", &job_id]);
    assert!(
        status_v["ok"].as_bool().unwrap_or(false),
        "job must still exist after dry-run: {status_v}"
    );
}
/// Run the binary with `args` (no root override) and return the raw stdout
/// text together with the exit code (-1 when terminated by a signal).
fn run_raw(args: &[&str]) -> (String, i32) {
    let output = Command::new(binary())
        .args(args)
        .output()
        .expect("run binary");
    let code = output.status.code().unwrap_or(-1);
    (String::from_utf8_lossy(&output.stdout).into_owned(), code)
}
// `completions bash` must exit 0 and emit a non-empty script that mentions
// the tool name.
#[test]
fn completions_bash_outputs_nonempty_script() {
    let (stdout, code) = run_raw(&["completions", "bash"]);
    assert_eq!(code, 0, "exit code should be 0 for 'completions bash'");
    assert!(
        !stdout.trim().is_empty(),
        "stdout should be non-empty for 'completions bash'"
    );
    let mentions_tool = stdout.contains("agent-exec") || stdout.contains("agent_exec");
    assert!(
        mentions_tool,
        "bash completion script should reference agent-exec: {stdout}"
    );
}
// `completions zsh` must exit 0 and emit a non-empty script.
#[test]
fn completions_zsh_outputs_nonempty_script() {
    let (stdout, code) = run_raw(&["completions", "zsh"]);
    assert_eq!(code, 0, "exit code should be 0 for 'completions zsh'");
    let nonempty = !stdout.trim().is_empty();
    assert!(nonempty, "stdout should be non-empty for 'completions zsh'");
}
// `completions fish` must exit 0 and emit a non-empty script.
#[test]
fn completions_fish_outputs_nonempty_script() {
    let (stdout, code) = run_raw(&["completions", "fish"]);
    assert_eq!(code, 0, "exit code should be 0 for 'completions fish'");
    let nonempty = !stdout.trim().is_empty();
    assert!(nonempty, "stdout should be non-empty for 'completions fish'");
}
// `completions powershell` must exit 0 and emit a non-empty script.
#[test]
fn completions_powershell_outputs_nonempty_script() {
    let (stdout, code) = run_raw(&["completions", "powershell"]);
    assert_eq!(
        code, 0,
        "exit code should be 0 for 'completions powershell'"
    );
    let nonempty = !stdout.trim().is_empty();
    assert!(
        nonempty,
        "stdout should be non-empty for 'completions powershell'"
    );
}
// An unknown shell name must be rejected as a usage error (exit code 2).
#[test]
fn completions_invalid_shell_exits_with_code_2() {
    let output = Command::new(binary())
        .args(["completions", "invalid"])
        .output()
        .expect("run binary");
    assert_eq!(
        output.status.code(),
        Some(2),
        "expected exit code 2 for 'completions invalid'"
    );
}
// An invalid --state value is a usage error: exit 2, nothing on stdout.
#[test]
fn list_state_invalid_value_exits_with_code_2() {
    let output = Command::new(binary())
        .args(["list", "--all", "--state", "bogus"])
        .output()
        .expect("run binary");
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert_eq!(
        output.status.code(),
        Some(2),
        "expected exit code 2 for invalid --state value, stdout: {stdout}"
    );
    assert!(
        stdout.trim().is_empty(),
        "stdout should be empty for invalid --state usage error: {stdout}"
    );
}
// Both --version and -V must exit 0 and print the tool name together with
// the Cargo package version.
#[test]
fn version_flag_prints_version_and_exits_zero() {
    let bin = binary();
    let pkg_version = env!("CARGO_PKG_VERSION");
    for flag in ["--version", "-V"] {
        let output = Command::new(&bin)
            .arg(flag)
            .output()
            .unwrap_or_else(|e| panic!("failed to run binary with {flag}: {e}"));
        assert!(
            output.status.success(),
            "exit code is non-zero for {flag}: {:?}",
            output.status
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        let looks_right = stdout.contains("agent-exec") && stdout.contains(pkg_version);
        assert!(
            looks_right,
            "stdout does not match 'agent-exec <version>' for {flag}: {stdout}"
        );
    }
}
/// Run the binary with `args`, optionally setting AGENT_EXEC_ROOT, and parse
/// stdout as JSON. Returns the parsed value plus the exit code (-1 when the
/// process was terminated by a signal).
fn run_cmd_raw(args: &[&str], root: Option<&str>) -> (serde_json::Value, i32) {
    let mut cmd = std::process::Command::new(binary());
    cmd.args(args);
    if let Some(r) = root {
        cmd.env("AGENT_EXEC_ROOT", r);
    }
    let output = cmd.output().expect("run binary");
    let exit_code = output.status.code().unwrap_or(-1);
    let stdout = String::from_utf8_lossy(&output.stdout);
    let parsed: serde_json::Value = serde_json::from_str(stdout.trim())
        .unwrap_or_else(|e| panic!("stdout is not valid JSON: {e}\nstdout: {stdout}"));
    (parsed, exit_code)
}
// A unique 10-character job-id prefix must resolve to the full id.
#[test]
fn prefix_lookup_resolves() {
    let harness = TestHarness::new();
    let run_v = harness.run(&["run", "--snapshot-after", "0", "echo", "prefix_test"]);
    let full_id = run_v["job_id"]
        .as_str()
        .expect("job_id missing")
        .to_string();
    let status = harness.run(&["status", &full_id[..10]]);
    assert_envelope(&status, "status", true);
    assert_eq!(
        status["job_id"].as_str().unwrap_or(""),
        full_id,
        "job_id in response must be the resolved full ID"
    );
}
// A prefix shared by two job ids must be rejected with a non-retryable
// ambiguous_job_id error and exit code 1.
#[test]
fn ambiguous_prefix_returns_error() {
    let h = TestHarness::new();
    let run_v1 = h.run(&["run", "--snapshot-after", "0", "echo", "job1"]);
    let id1 = run_v1["job_id"]
        .as_str()
        .expect("job_id missing")
        .to_string();
    let run_v2 = h.run(&["run", "--snapshot-after", "0", "echo", "job2"]);
    let id2 = run_v2["job_id"]
        .as_str()
        .expect("job_id missing")
        .to_string();
    // Length of the longest common prefix of the two ids.
    let shared_len = id1
        .chars()
        .zip(id2.chars())
        .take_while(|(a, b)| a == b)
        .count();
    if shared_len == 0 {
        // No shared prefix — ambiguity cannot be constructed this round.
        return;
    }
    let prefix = &id1[..shared_len];
    let (v, exit_code) = run_cmd_raw(&["status", prefix], Some(h.root()));
    assert_eq!(exit_code, 1, "ambiguous prefix must exit 1: {v}");
    assert!(!v["ok"].as_bool().unwrap_or(true), "ok must be false: {v}");
    assert_eq!(v["type"].as_str().unwrap_or(""), "error");
    assert_eq!(
        v["error"]["code"].as_str().unwrap_or(""),
        "ambiguous_job_id",
        "expected error.code=ambiguous_job_id: {v}"
    );
    assert!(
        !v["error"]["retryable"].as_bool().unwrap_or(true),
        "retryable must be false: {v}"
    );
}
// Prefix resolution must work uniformly across commands (tail, wait, kill),
// each echoing back the resolved full id.
#[test]
fn prefix_lookup_cross_command() {
    let h = TestHarness::new();
    // Long-running job so tail/wait/kill all find it alive.
    let run_v = h.run(&["run", "--snapshot-after", "0", "sleep", "60"]);
    let full_id = run_v["job_id"]
        .as_str()
        .expect("job_id missing")
        .to_string();
    let prefix = &full_id[..10];
    let tail_v = h.run(&["tail", prefix]);
    assert_envelope(&tail_v, "tail", true);
    assert_eq!(tail_v["job_id"].as_str().unwrap_or(""), full_id);
    // Short timeout: the job won't finish — only the resolved id is checked.
    let (wait_v, _) = run_cmd_raw(&["wait", "--timeout-ms", "100", prefix], Some(h.root()));
    assert_eq!(wait_v["job_id"].as_str().unwrap_or(""), full_id);
    // kill also doubles as cleanup for the 60 s sleeper.
    let kill_v = h.run(&["kill", prefix]);
    assert_envelope(&kill_v, "kill", true);
    assert_eq!(kill_v["job_id"].as_str().unwrap_or(""), full_id);
}
// `delete` must accept a unique id prefix, resolve it to the full id in the
// response, and actually remove the job.
#[test]
fn delete_prefix_resolves_unique_match() {
    let h = TestHarness::new();
    let run_v = h.run(&["run", "--snapshot-after", "0", "echo", "delete_prefix_test"]);
    let full_id = run_v["job_id"]
        .as_str()
        .expect("job_id missing")
        .to_string();
    // Job must be terminal before delete is allowed.
    h.run(&["wait", &full_id]);
    let prefix = &full_id[..10];
    let (v, exit_code) = run_cmd_raw(&["delete", prefix], Some(h.root()));
    assert_eq!(exit_code, 0, "delete with prefix must succeed: {v}");
    assert_envelope(&v, "delete", true);
    assert_eq!(
        v["deleted"].as_u64().unwrap_or(0),
        1,
        "expected deleted=1: {v}"
    );
    let jobs = v["jobs"].as_array().expect("jobs must be array");
    assert_eq!(jobs.len(), 1);
    assert_eq!(
        jobs[0]["job_id"].as_str().unwrap_or(""),
        full_id,
        "job_id in response must be the resolved full ID"
    );
    let (status_v, _) = run_cmd_raw(&["status", &full_id], Some(h.root()));
    assert_eq!(
        status_v["error"]["code"].as_str().unwrap_or(""),
        "job_not_found",
        "job must not exist after delete: {status_v}"
    );
}
// `delete` with a prefix shared by two job ids must fail with exit 1 and a
// non-retryable ambiguous_job_id error.
#[test]
fn delete_ambiguous_prefix_returns_error() {
    let h = TestHarness::new();
    let run1 = h.run(&["run", "--snapshot-after", "0", "echo", "del_amb_1"]);
    let id1 = run1["job_id"].as_str().expect("job_id").to_string();
    let run2 = h.run(&["run", "--snapshot-after", "0", "echo", "del_amb_2"]);
    let id2 = run2["job_id"].as_str().expect("job_id").to_string();
    // Both jobs terminal so a delete would otherwise be permitted.
    h.run(&["wait", &id1]);
    h.run(&["wait", &id2]);
    // Length of the longest common prefix of the two ids.
    let shared_len = id1
        .chars()
        .zip(id2.chars())
        .take_while(|(a, b)| a == b)
        .count();
    if shared_len == 0 {
        // No shared prefix — ambiguity cannot be constructed this round.
        return;
    }
    let prefix = &id1[..shared_len];
    let (v, exit_code) = run_cmd_raw(&["delete", prefix], Some(h.root()));
    assert_eq!(exit_code, 1, "ambiguous delete prefix must exit 1: {v}");
    assert!(!v["ok"].as_bool().unwrap_or(true), "ok must be false: {v}");
    assert_eq!(v["type"].as_str().unwrap_or(""), "error");
    assert_eq!(
        v["error"]["code"].as_str().unwrap_or(""),
        "ambiguous_job_id",
        "error code must be ambiguous_job_id: {v}"
    );
    assert!(
        !v["error"]["retryable"].as_bool().unwrap_or(true),
        "retryable must be false: {v}"
    );
}
/// Runs the binary with `args` (plus optional `AGENT_EXEC_ROOT`) and returns
/// `(stdout, exit_code)`; an exit code of -1 means the process was killed by
/// a signal and reported no code.
///
/// `_shell` is accepted only so call sites read naturally — the shell under
/// test is also part of `args`, so the parameter is intentionally unused
/// (previously silenced with `let _ = shell;`).
fn run_completion(_shell: &str, args: &[&str], root: Option<&str>) -> (String, i32) {
    let bin = binary();
    let mut cmd = Command::new(&bin);
    cmd.args(args);
    if let Some(r) = root {
        cmd.env("AGENT_EXEC_ROOT", r);
    }
    let output = cmd.output().expect("run binary");
    let stdout = String::from_utf8_lossy(&output.stdout).to_string();
    let code = output.status.code().unwrap_or(-1);
    (stdout, code)
}
/// Drives clap's dynamic-completion protocol for `subcommand` and returns the
/// non-empty candidate lines printed to stdout.
///
/// `word_index` is the argv index of the word being completed (exported via
/// `_CLAP_COMPLETE_INDEX`); `partial` is the in-progress word; `root` is
/// passed through `AGENT_EXEC_ROOT`.
fn get_dynamic_candidates(
    root: &str,
    subcommand: &str,
    word_index: usize,
    partial: &str,
) -> Vec<String> {
    let bin = binary();
    let mut cmd = Command::new(&bin);
    // clap's completion protocol: the binary re-invokes itself with the
    // command line being completed after a `--` separator.
    cmd.args([bin.to_str().unwrap(), "--", "agent-exec", subcommand, partial]);
    cmd.env("COMPLETE", "bash")
        .env("AGENT_EXEC_ROOT", root)
        .env("_CLAP_COMPLETE_INDEX", word_index.to_string());
    let output = cmd.output().expect("run binary for dynamic completion");
    String::from_utf8_lossy(&output.stdout)
        .lines()
        .filter_map(|line| {
            let trimmed = line.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        })
        .collect()
}
#[test]
fn test_completions_bash_outputs_nonempty_script() {
    // Generating the bash completion script must succeed and mention the binary.
    let (script, code) = run_completion("bash", &["completions", "bash"], None);
    assert_eq!(code, 0, "completions bash must exit 0");
    assert!(
        !script.trim().is_empty(),
        "completions bash must produce non-empty output"
    );
    let mentions_binary = script.contains("_agent") || script.contains("agent");
    assert!(
        mentions_binary,
        "bash script must reference agent-exec: {}",
        script
    );
}
#[test]
fn test_completions_zsh_outputs_nonempty_script() {
    // zsh completion script generation must succeed and print something.
    let (script, code) = run_completion("zsh", &["completions", "zsh"], None);
    assert_eq!(code, 0);
    assert!(!script.trim().is_empty());
}
#[test]
fn test_completions_fish_outputs_nonempty_script() {
    // fish completion script generation must succeed and print something.
    let (script, code) = run_completion("fish", &["completions", "fish"], None);
    assert_eq!(code, 0);
    assert!(!script.trim().is_empty());
}
#[test]
fn test_completions_powershell_outputs_nonempty_script() {
    // PowerShell completion script generation must succeed and print something.
    let (script, code) = run_completion("powershell", &["completions", "powershell"], None);
    assert_eq!(code, 0);
    assert!(!script.trim().is_empty());
}
#[test]
fn test_completions_invalid_shell_exits_with_code_2() {
    // Unknown shell names are a usage error, reported with exit code 2.
    let (_stdout, code) = run_completion("invalid", &["completions", "invalidshell"], None);
    assert_eq!(code, 2, "invalid shell must produce a usage error (exit 2)");
}
#[test]
fn test_dynamic_completion_all_jobs_for_status() {
    // `status` completion should offer every job regardless of its state.
    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path().to_str().unwrap();
    let fixtures = [
        ("01AAAAAAAAAAAAAAAAAAAAAAAAA", "running"),
        ("01BBBBBBBBBBBBBBBBBBBBBBBBB", "exited"),
    ];
    for (id, state) in fixtures {
        let dir = tmp.path().join(id);
        std::fs::create_dir_all(&dir).unwrap();
        std::fs::write(
            dir.join("state.json"),
            format!("{{\"state\":\"{state}\",\"job_id\":\"{id}\"}}"),
        )
        .unwrap();
    }
    let candidates = get_dynamic_candidates(root, "status", 2, "");
    let ids: Vec<_> = candidates.iter().filter(|c| c.starts_with("01")).collect();
    assert!(
        ids.iter().any(|s| s.contains("01AAA")),
        "status should include running jobs: {candidates:?}"
    );
    assert!(
        ids.iter().any(|s| s.contains("01BBB")),
        "status should include exited jobs: {candidates:?}"
    );
}
#[test]
fn test_dynamic_completion_running_only_for_kill() {
    // `kill` completion should offer only jobs that are still running.
    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path().to_str().unwrap();
    let fixtures = [
        ("01AAAAAAAAAAAAAAAAAAAAAAAAA", "running"),
        ("01BBBBBBBBBBBBBBBBBBBBBBBBB", "exited"),
    ];
    for (id, state) in fixtures {
        let dir = tmp.path().join(id);
        std::fs::create_dir_all(&dir).unwrap();
        std::fs::write(
            dir.join("state.json"),
            format!("{{\"state\":\"{state}\",\"job_id\":\"{id}\"}}"),
        )
        .unwrap();
    }
    let candidates = get_dynamic_candidates(root, "kill", 2, "");
    let ids: Vec<_> = candidates.iter().filter(|c| c.starts_with("01")).collect();
    assert!(
        ids.iter().any(|s| s.contains("01AAA")),
        "kill should include running job: {candidates:?}"
    );
    assert!(
        !ids.iter().any(|s| s.contains("01BBB")),
        "kill should exclude exited job: {candidates:?}"
    );
}
#[test]
fn test_dynamic_completion_empty_when_root_missing() {
    // A nonexistent root directory must not surface any job-ID candidates.
    let candidates = get_dynamic_candidates("/nonexistent/path", "status", 2, "");
    let has_job_ids = candidates.iter().any(|c| c.starts_with("01"));
    assert!(
        !has_job_ids,
        "missing root should yield no job IDs: {candidates:?}"
    );
}
/// Like `get_dynamic_candidates`, but supplies the jobs root via the global
/// `--root` flag on the completed command line instead of the env var, to
/// exercise argv-based root resolution during completion.
fn get_dynamic_candidates_via_root_arg(root: &str, subcommand: &str, partial: &str) -> Vec<String> {
    let bin = binary();
    let mut cmd = Command::new(&bin);
    cmd.args([
        bin.to_str().unwrap(),
        "--",
        "agent-exec",
        "--root",
        root,
        subcommand,
        partial,
    ]);
    // The partial word sits at argv index 4: agent-exec --root <root> <sub> <partial>.
    cmd.env("COMPLETE", "bash").env("_CLAP_COMPLETE_INDEX", "4");
    let output = cmd
        .output()
        .expect("run binary for dynamic completion via --root arg");
    String::from_utf8_lossy(&output.stdout)
        .lines()
        .map(str::trim)
        .filter(|l| !l.is_empty())
        .map(str::to_string)
        .collect()
}
#[test]
fn test_dynamic_completion_with_root_arg_returns_jobs_from_that_path() {
    // Jobs under a root supplied via `--root` on the command line (no env var)
    // must still be discovered by dynamic completion.
    let tmp = tempfile::tempdir().unwrap();
    let root = tmp.path().to_str().unwrap();
    let job_id = "01CUSTOMROOTJOBAAAAAAAAAAAAA";
    let job_dir = tmp.path().join(job_id);
    std::fs::create_dir_all(&job_dir).unwrap();
    std::fs::write(
        job_dir.join("state.json"),
        format!("{{\"state\":\"running\",\"job_id\":\"{job_id}\"}}"),
    )
    .unwrap();
    let candidates = get_dynamic_candidates_via_root_arg(root, "status", "");
    let found = candidates.iter().any(|c| c.starts_with("01CUSTOM"));
    assert!(
        found,
        "--root argv resolution: expected job {job_id} in candidates: {candidates:?}"
    );
}