codetether_agent/cli/run.rs
1//! Non-interactive run command
2
3use super::RunArgs;
4use crate::bus::{AgentBus, relay::ProtocolRelayRuntime, relay::RelayAgentProfile};
5use crate::config::Config;
6use crate::provider::{ContentPart, Message, Role};
7use crate::session::Session;
8use anyhow::Result;
9use serde::Serialize;
10use std::collections::HashMap;
11
/// Upper bound on relay participants accepted by `/autochat` (and `/go`).
const AUTOCHAT_MAX_AGENTS: usize = 8;
/// Agent count used when the user does not supply one.
const AUTOCHAT_DEFAULT_AGENTS: usize = 3;
/// Maximum full passes over the agent ring before the relay gives up.
const AUTOCHAT_MAX_ROUNDS: usize = 3;
/// Fallback task used when `/go` / `/autochat` is invoked without a task.
const AUTOCHAT_QUICK_DEMO_TASK: &str = "Introduce yourselves with your role/personality, then relay one concrete implementation plan with clear next handoffs.";
/// Model forced by the `/go` / `/team` easy aliases unless `--model` overrides it.
const GO_DEFAULT_MODEL: &str = "minimax/MiniMax-M2.5";
17
/// Description of a single relay participant built by `build_relay_profiles`:
/// display name, the system prompt its session is seeded with, and the
/// capability tags registered with the protocol relay.
#[derive(Debug, Clone)]
struct RelayProfile {
    // Agent handle, e.g. "auto-planner" (index-suffixed past the template count).
    name: String,
    // System-role instructions injected as the session's first message.
    instructions: String,
    // Capability tags forwarded to relay registration.
    capabilities: Vec<String>,
}
24
/// Serializable outcome of an autochat relay run, printed either as JSON
/// (`--format json`) or as the human-readable `summary` field.
#[derive(Debug, Serialize)]
struct AutochatCliResult {
    // "converged", "max_rounds_reached", or "agent_error".
    status: String,
    relay_id: String,
    model: String,
    agent_count: usize,
    // Total prompts dispatched across all rounds.
    turns: usize,
    agents: Vec<String>,
    // The final baton text at the moment the relay stopped.
    final_handoff: String,
    summary: String,
    // Populated when status is "agent_error".
    failure: Option<String>,
}
37
/// Entry point for the non-interactive `run` command.
///
/// Dispatches `/go`, `/team`, and `/autochat` messages to the protocol-first
/// relay; otherwise creates or resumes a `Session` and runs the message as a
/// single prompt. Output is JSON when `args.format == "json"`, else the
/// response text on stdout with session info on stderr.
pub async fn execute(args: RunArgs) -> Result<()> {
    let message = args.message.trim();

    if message.is_empty() {
        anyhow::bail!("You must provide a message");
    }

    tracing::info!("Running with message: {}", message);

    // Load configuration; a missing/broken config falls back to defaults.
    let config = Config::load().await.unwrap_or_default();

    // Protocol-first relay aliases in CLI:
    // - /go [count] <task>
    // - /autochat [count] <task>
    // Easy-go detection must happen before normalization rewrites /go into /autochat.
    let easy_go_requested = is_easy_go_command(message);
    let normalized = normalize_cli_go_command(message);
    if let Some(rest) = command_with_optional_args(&normalized, "/autochat") {
        let Some((agent_count, task)) = parse_autochat_args(rest) else {
            anyhow::bail!(
                "Usage: /autochat [count] <task>\nEasy mode: /go <task>\ncount range: 2-{} (default: {})",
                AUTOCHAT_MAX_AGENTS,
                AUTOCHAT_DEFAULT_AGENTS
            );
        };

        // A relay needs at least two participants; cap at the template-backed maximum.
        if !(2..=AUTOCHAT_MAX_AGENTS).contains(&agent_count) {
            anyhow::bail!(
                "Invalid relay size {}. count must be between 2 and {}",
                agent_count,
                AUTOCHAT_MAX_AGENTS
            );
        }

        // Model precedence: --model > (easy-go default) > env var > config default.
        let model = resolve_autochat_model(
            args.model.as_deref(),
            std::env::var("CODETETHER_DEFAULT_MODEL").ok().as_deref(),
            config.default_model.as_deref(),
            easy_go_requested,
        );

        let relay_result = run_protocol_first_relay(agent_count, task, &model).await?;
        match args.format.as_str() {
            "json" => println!("{}", serde_json::to_string_pretty(&relay_result)?),
            _ => {
                println!("{}", relay_result.summary);
                if let Some(failure) = &relay_result.failure {
                    eprintln!("\nFailure detail: {}", failure);
                }
                eprintln!(
                    "\n[Relay: {} | Model: {}]",
                    relay_result.relay_id, relay_result.model
                );
            }
        }
        return Ok(());
    }

    // Create or continue session.
    let mut session = if let Some(session_id) = args.session.clone() {
        tracing::info!("Continuing session: {}", session_id);
        // "opencode_" prefixed IDs are imported from OpenCode's storage.
        if let Some(oc_id) = session_id.strip_prefix("opencode_") {
            if let Some(storage) = crate::opencode::OpenCodeStorage::new() {
                Session::from_opencode(oc_id, &storage).await?
            } else {
                anyhow::bail!("OpenCode storage not available")
            }
        } else {
            Session::load(&session_id).await?
        }
    } else if args.continue_session {
        // -c / --continue: most recent session for the current workspace,
        // falling back to an OpenCode session, then to a brand-new session.
        let workspace_dir = std::env::current_dir().unwrap_or_default();
        match Session::last_for_directory(Some(&workspace_dir)).await {
            Ok(s) => {
                tracing::info!(
                    session_id = %s.id,
                    workspace = %workspace_dir.display(),
                    "Continuing last workspace session"
                );
                s
            }
            Err(_) => {
                // Fallback: try to resume from OpenCode session
                match Session::last_opencode_for_directory(&workspace_dir).await {
                    Ok(s) => {
                        tracing::info!(
                            session_id = %s.id,
                            workspace = %workspace_dir.display(),
                            "Resuming from OpenCode session"
                        );
                        s
                    }
                    Err(_) => {
                        let s = Session::new().await?;
                        tracing::info!(
                            session_id = %s.id,
                            workspace = %workspace_dir.display(),
                            "No workspace session found; created new session"
                        );
                        s
                    }
                }
            }
        }
    } else {
        let s = Session::new().await?;
        tracing::info!("Created new session: {}", s.id);
        s
    };

    // Set model: CLI arg > env var > config default
    let model = args
        .model
        .or_else(|| std::env::var("CODETETHER_DEFAULT_MODEL").ok())
        .or(config.default_model);

    if let Some(model) = model {
        tracing::info!("Using model: {}", model);
        session.metadata.model = Some(model);
    }

    // Execute the prompt
    let result = session.prompt(message).await?;

    // Output based on format
    match args.format.as_str() {
        "json" => {
            println!("{}", serde_json::to_string_pretty(&result)?);
        }
        _ => {
            println!("{}", result.text);
            // Show session ID for continuation
            eprintln!(
                "\n[Session: {} | Continue with: codetether run -c \"...\"]",
                session.id
            );
        }
    }

    Ok(())
}
179
/// Matches `input` against an exact slash `command`, rejecting prefix
/// collisions (e.g. "/autochatty" does not match "/autochat").
///
/// Returns `Some("")` for a bare command, `Some(args)` (trimmed) when the
/// command is followed by whitespace, and `None` otherwise.
fn command_with_optional_args<'a>(input: &'a str, command: &str) -> Option<&'a str> {
    let remainder = input.trim().strip_prefix(command)?;

    match remainder.chars().next() {
        // Bare command with no arguments.
        None => Some(""),
        // Whitespace separator: everything after it is the argument string.
        Some(c) if c.is_whitespace() => Some(remainder.trim()),
        // Anything else means `command` was only a prefix of a longer token.
        Some(_) => None,
    }
}
195
196fn normalize_cli_go_command(input: &str) -> String {
197    let trimmed = input.trim();
198    if trimmed.is_empty() || !trimmed.starts_with('/') {
199        return trimmed.to_string();
200    }
201
202    let mut parts = trimmed.splitn(2, char::is_whitespace);
203    let command = parts.next().unwrap_or("");
204    let args = parts.next().unwrap_or("").trim();
205
206    match command.to_ascii_lowercase().as_str() {
207        "/go" | "/team" => {
208            if args.is_empty() {
209                format!(
210                    "/autochat {} {}",
211                    AUTOCHAT_DEFAULT_AGENTS, AUTOCHAT_QUICK_DEMO_TASK
212                )
213            } else {
214                let mut count_and_task = args.splitn(2, char::is_whitespace);
215                let first = count_and_task.next().unwrap_or("");
216                if let Ok(count) = first.parse::<usize>() {
217                    let task = count_and_task.next().unwrap_or("").trim();
218                    if task.is_empty() {
219                        format!("/autochat {count} {AUTOCHAT_QUICK_DEMO_TASK}")
220                    } else {
221                        format!("/autochat {count} {task}")
222                    }
223                } else {
224                    format!("/autochat {} {args}", AUTOCHAT_DEFAULT_AGENTS)
225                }
226            }
227        }
228        _ => trimmed.to_string(),
229    }
230}
231
/// True when the message starts with one of the easy-mode relay aliases
/// (`/go` or `/team`), compared case-insensitively on the first token.
fn is_easy_go_command(input: &str) -> bool {
    input
        .split_whitespace()
        .next()
        .map(|token| {
            let cmd = token.to_ascii_lowercase();
            cmd == "/go" || cmd == "/team"
        })
        .unwrap_or(false)
}
242
243fn parse_autochat_args(rest: &str) -> Option<(usize, &str)> {
244    let rest = rest.trim();
245    if rest.is_empty() {
246        return None;
247    }
248
249    let mut parts = rest.splitn(2, char::is_whitespace);
250    let first = parts.next().unwrap_or("").trim();
251    if first.is_empty() {
252        return None;
253    }
254
255    if let Ok(count) = first.parse::<usize>() {
256        let task = parts.next().unwrap_or("").trim();
257        if task.is_empty() {
258            Some((count, AUTOCHAT_QUICK_DEMO_TASK))
259        } else {
260            Some((count, task))
261        }
262    } else {
263        Some((AUTOCHAT_DEFAULT_AGENTS, rest))
264    }
265}
266
267fn resolve_autochat_model(
268    cli_model: Option<&str>,
269    env_model: Option<&str>,
270    config_model: Option<&str>,
271    easy_go_requested: bool,
272) -> String {
273    if let Some(model) = cli_model.filter(|value| !value.trim().is_empty()) {
274        return model.to_string();
275    }
276    if easy_go_requested {
277        return GO_DEFAULT_MODEL.to_string();
278    }
279    if let Some(model) = env_model.filter(|value| !value.trim().is_empty()) {
280        return model.to_string();
281    }
282    if let Some(model) = config_model.filter(|value| !value.trim().is_empty()) {
283        return model.to_string();
284    }
285    "zai/glm-5".to_string()
286}
287
288fn build_relay_profiles(count: usize) -> Vec<RelayProfile> {
289    let templates = [
290        (
291            "planner",
292            "Decompose objectives into precise, sequenced steps.",
293            "planning",
294        ),
295        (
296            "researcher",
297            "Validate assumptions, surface edge cases, and gather critical evidence.",
298            "research",
299        ),
300        (
301            "coder",
302            "Propose concrete implementation details and practical code-level direction.",
303            "implementation",
304        ),
305        (
306            "reviewer",
307            "Challenge weak spots, enforce quality, and reduce regressions.",
308            "review",
309        ),
310        (
311            "tester",
312            "Design verification strategy, tests, and failure-oriented checks.",
313            "testing",
314        ),
315        (
316            "integrator",
317            "Synthesize contributions into a coherent delivery plan.",
318            "integration",
319        ),
320        (
321            "skeptic",
322            "Stress-test confidence and call out hidden risks early.",
323            "risk-analysis",
324        ),
325        (
326            "summarizer",
327            "Produce concise, actionable final guidance.",
328            "summarization",
329        ),
330    ];
331
332    let mut profiles = Vec::with_capacity(count);
333    for idx in 0..count {
334        let (slug, mission, specialty) = templates[idx % templates.len()];
335        let name = if idx < templates.len() {
336            format!("auto-{slug}")
337        } else {
338            format!("auto-{slug}-{}", idx + 1)
339        };
340
341        let instructions = format!(
342            "You are @{name}.\n\
343             Specialty: {specialty}. {mission}\n\n\
344             This is a protocol-first relay conversation. Treat the incoming handoff as authoritative context.\n\
345             Keep your response concise, concrete, and useful for the next specialist.\n\
346             Include one clear recommendation for what the next agent should do.",
347        );
348        let capabilities = vec![
349            specialty.to_string(),
350            "relay".to_string(),
351            "context-handoff".to_string(),
352            "autochat".to_string(),
353        ];
354
355        profiles.push(RelayProfile {
356            name,
357            instructions,
358            capabilities,
359        });
360    }
361    profiles
362}
363
/// Returns `value` unchanged when it has at most `max_chars` characters;
/// otherwise returns the first `max_chars` characters followed by "...".
/// Counts Unicode scalar values, not bytes, so multibyte text is never split.
fn truncate_with_ellipsis(value: &str, max_chars: usize) -> String {
    if max_chars == 0 {
        return String::new();
    }

    // `nth(max_chars)` yields the byte offset of the first character past the
    // limit, or `None` when the string already fits.
    match value.char_indices().nth(max_chars) {
        Some((cut, _)) => format!("{}...", &value[..cut]),
        None => value.to_string(),
    }
}
385
/// Canonicalizes agent output for convergence comparison: lowercases ASCII
/// alphanumerics, collapses whitespace runs to single spaces, drops all other
/// characters, caps the result at 280 bytes, and trims the ends.
fn normalize_for_convergence(text: &str) -> String {
    let mut canonical = String::with_capacity(text.len().min(512));
    let mut just_spaced = false;

    for ch in text.chars() {
        match ch {
            c if c.is_ascii_alphanumeric() => {
                canonical.push(c.to_ascii_lowercase());
                just_spaced = false;
            }
            c if c.is_whitespace() => {
                // Collapse any whitespace run into a single separator.
                if !just_spaced {
                    canonical.push(' ');
                    just_spaced = true;
                }
            }
            // Punctuation and non-ASCII characters are dropped entirely.
            _ => {}
        }

        // Only a bounded prefix matters for detecting repeated handoffs.
        if canonical.len() >= 280 {
            break;
        }
    }

    canonical.trim().to_string()
}
406
/// Runs a protocol-first relay: each agent receives the "baton" (the task
/// plus the previous agent's handoff), responds via its own `Session`, and
/// passes a new baton to the next agent in ring order for up to
/// `AUTOCHAT_MAX_ROUNDS` rounds.
///
/// Terminates early when two consecutive outputs normalize identically
/// ("converged") or when an agent's session fails ("agent_error"); otherwise
/// ends with status "max_rounds_reached". Never returns `Err` for per-agent
/// failures — those are reported in the result's `status`/`failure` fields.
async fn run_protocol_first_relay(
    agent_count: usize,
    task: &str,
    model_ref: &str,
) -> Result<AutochatCliResult> {
    let bus = AgentBus::new().into_arc();
    let relay = ProtocolRelayRuntime::new(bus);

    let profiles = build_relay_profiles(agent_count);
    // Lightweight registration records for the relay runtime.
    let relay_profiles: Vec<RelayAgentProfile> = profiles
        .iter()
        .map(|profile| RelayAgentProfile {
            name: profile.name.clone(),
            capabilities: profile.capabilities.clone(),
        })
        .collect();

    // Ring order of participants; also reused for handoff source lookup.
    let ordered_agents: Vec<String> = profiles
        .iter()
        .map(|profile| profile.name.clone())
        .collect();
    let mut sessions: HashMap<String, Session> = HashMap::new();

    // One session per agent, seeded with its role instructions as a system message.
    for profile in &profiles {
        let mut session = Session::new().await?;
        session.metadata.model = Some(model_ref.to_string());
        session.agent = profile.name.clone();
        session.add_message(Message {
            role: Role::System,
            content: vec![ContentPart::Text {
                text: profile.instructions.clone(),
            }],
        });
        sessions.insert(profile.name.clone(), session);
    }

    if ordered_agents.len() < 2 {
        anyhow::bail!("Autochat needs at least 2 agents to relay.");
    }

    relay.register_agents(&relay_profiles);

    // The baton is the text handed to the next agent; it starts as the task itself.
    let mut baton = format!(
        "Task:\n{task}\n\nStart by proposing an execution strategy and one immediate next step."
    );
    let mut previous_normalized: Option<String> = None;
    // Consecutive identical (normalized) outputs seen so far.
    let mut convergence_hits = 0usize;
    let mut turns = 0usize;
    let mut status = "max_rounds_reached".to_string();
    let mut failure_note: Option<String> = None;

    'relay_loop: for round in 1..=AUTOCHAT_MAX_ROUNDS {
        for idx in 0..ordered_agents.len() {
            let to = ordered_agents[idx].clone();
            // First agent of round 1 receives from "user"; thereafter the ring wraps.
            let from = if idx == 0 {
                if round == 1 {
                    "user".to_string()
                } else {
                    ordered_agents[ordered_agents.len() - 1].clone()
                }
            } else {
                ordered_agents[idx - 1].clone()
            };

            turns += 1;
            relay.send_handoff(&from, &to, &baton);

            // Temporarily take ownership of the session so we can mutably prompt it.
            let Some(mut session) = sessions.remove(&to) else {
                status = "agent_error".to_string();
                failure_note = Some(format!("Relay agent @{to} session was unavailable."));
                break 'relay_loop;
            };

            let output = match session.prompt(&baton).await {
                Ok(response) => response.text,
                Err(err) => {
                    status = "agent_error".to_string();
                    failure_note = Some(format!("Relay agent @{to} failed: {err}"));
                    // Put the session back before bailing out of the relay.
                    sessions.insert(to, session);
                    break 'relay_loop;
                }
            };

            sessions.insert(to.clone(), session);

            // Convergence check: two consecutive identical normalized outputs end the relay.
            let normalized = normalize_for_convergence(&output);
            if previous_normalized.as_deref() == Some(normalized.as_str()) {
                convergence_hits += 1;
            } else {
                convergence_hits = 0;
            }
            previous_normalized = Some(normalized);

            // Build the next baton from this agent's (truncated) output.
            baton = format!(
                "Relay task:\n{task}\n\nIncoming handoff from @{to}:\n{}\n\nContinue the work from this handoff. Keep your response focused and provide one concrete next-step instruction for the next agent.",
                truncate_with_ellipsis(&output, 3_500)
            );

            if convergence_hits >= 2 {
                status = "converged".to_string();
                break 'relay_loop;
            }
        }
    }

    relay.shutdown_agents(&ordered_agents);

    let mut summary = format!(
        "Autochat complete ({status}) — relay {} with {} agents over {} turns.\n\nFinal relay handoff:\n{}",
        relay.relay_id(),
        ordered_agents.len(),
        turns,
        truncate_with_ellipsis(&baton, 4_000)
    );
    if let Some(note) = &failure_note {
        summary.push_str(&format!("\n\nFailure detail: {note}"));
    }

    Ok(AutochatCliResult {
        status,
        relay_id: relay.relay_id().to_string(),
        model: model_ref.to_string(),
        agent_count: ordered_agents.len(),
        turns,
        agents: ordered_agents,
        final_handoff: baton,
        summary,
        failure: failure_note,
    })
}
537
#[cfg(test)]
mod tests {
    use super::{
        AUTOCHAT_QUICK_DEMO_TASK, command_with_optional_args, is_easy_go_command,
        normalize_cli_go_command, parse_autochat_args, resolve_autochat_model,
    };

    // /go with explicit count and task maps 1:1 onto /autochat.
    #[test]
    fn normalize_go_maps_to_autochat_with_count_and_task() {
        assert_eq!(
            normalize_cli_go_command("/go 4 build protocol relay"),
            "/autochat 4 build protocol relay"
        );
    }

    // A count with no task falls back to the quick demo task.
    #[test]
    fn normalize_go_count_only_uses_demo_task() {
        assert_eq!(
            normalize_cli_go_command("/go 4"),
            format!("/autochat 4 {AUTOCHAT_QUICK_DEMO_TASK}")
        );
    }

    // Omitting the count uses AUTOCHAT_DEFAULT_AGENTS (3).
    #[test]
    fn parse_autochat_args_supports_default_count() {
        assert_eq!(
            parse_autochat_args("build a relay").expect("valid args"),
            (3, "build a relay")
        );
    }

    #[test]
    fn parse_autochat_args_supports_explicit_count() {
        assert_eq!(
            parse_autochat_args("4 build a relay").expect("valid args"),
            (4, "build a relay")
        );
    }

    // "/autochatty" must not be treated as "/autochat" plus args.
    #[test]
    fn command_with_optional_args_avoids_prefix_collision() {
        assert_eq!(command_with_optional_args("/autochatty", "/autochat"), None);
    }

    #[test]
    fn easy_go_detection_handles_aliases() {
        assert!(is_easy_go_command("/go 4 task"));
        assert!(is_easy_go_command("/team 4 task"));
        assert!(!is_easy_go_command("/autochat 4 task"));
    }

    // Easy-go default outranks the config default...
    #[test]
    fn easy_go_defaults_to_minimax_when_model_not_set() {
        assert_eq!(
            resolve_autochat_model(None, None, Some("zai/glm-5"), true),
            "minimax/MiniMax-M2.5"
        );
    }

    // ...but an explicit CLI model outranks the easy-go default.
    #[test]
    fn explicit_model_wins_over_easy_go_default() {
        assert_eq!(
            resolve_autochat_model(Some("zai/glm-5"), None, None, true),
            "zai/glm-5"
        );
    }
}