use super::RunArgs;
use crate::bus::{AgentBus, relay::ProtocolRelayRuntime, relay::RelayAgentProfile};
use crate::config::Config;
use crate::provider::{ContentPart, Message, Role};
use crate::session::Session;
use anyhow::Result;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use std::collections::HashMap;

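// Tuning knobs for the /autochat relay: team-size bounds, round limit,
// dynamic-spawn budget, and the default task and model used by the /go shortcut.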
const AUTOCHAT_MAX_AGENTS: usize = 8;
const AUTOCHAT_DEFAULT_AGENTS: usize = 3;
const AUTOCHAT_MAX_ROUNDS: usize = 3;
const AUTOCHAT_MAX_DYNAMIC_SPAWNS: usize = 3;
const AUTOCHAT_SPAWN_CHECK_MIN_CHARS: usize = 800;
const AUTOCHAT_QUICK_DEMO_TASK: &str = "Self-organize into the right specialties for this task, then relay one concrete implementation plan with clear next handoffs.";
const GO_DEFAULT_MODEL: &str = "minimax/MiniMax-M2.5";

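/// Runtime profile for a relay agent: the name it is addressed by, its system
/// instructions, and the capability tags it advertises to the relay runtime.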
#[derive(Debug, Clone)]
struct RelayProfile {
    name: String,
    instructions: String,
    capabilities: Vec<String>,
}

#[derive(Debug, Clone, Deserialize)]
struct PlannedRelayProfile {
    #[serde(default)]
    name: String,
    #[serde(default)]
    specialty: String,
    #[serde(default)]
    mission: String,
    #[serde(default)]
    capabilities: Vec<String>,
}

#[derive(Debug, Clone, Deserialize)]
struct PlannedRelayResponse {
    #[serde(default)]
    profiles: Vec<PlannedRelayProfile>,
}

#[derive(Debug, Clone, Deserialize)]
struct RelaySpawnDecision {
    #[serde(default)]
    spawn: bool,
    #[serde(default)]
    reason: String,
    #[serde(default)]
    profile: Option<PlannedRelayProfile>,
}

#[derive(Debug, Serialize)]
struct AutochatCliResult {
    status: String,
    relay_id: String,
    model: String,
    agent_count: usize,
    turns: usize,
    agents: Vec<String>,
    final_handoff: String,
    summary: String,
    failure: Option<String>,
}

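/// Lowercases a label and collapses every non-alphanumeric run into a single
/// dash, e.g. "Data Specialist" becomes "data-specialist".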
fn slugify_label(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    let mut last_dash = false;

    for ch in value.chars() {
        let ch = ch.to_ascii_lowercase();
        if ch.is_ascii_alphanumeric() {
            out.push(ch);
            last_dash = false;
        } else if !last_dash {
            out.push('-');
            last_dash = true;
        }
    }

    out.trim_matches('-').to_string()
}

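/// Normalizes a planned agent name into a slug with an `auto-` prefix and caps
/// it at 48 characters, dropping any ellipsis the truncation helper appends.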
fn sanitize_relay_agent_name(value: &str) -> String {
    let raw = slugify_label(value);
    let base = if raw.is_empty() {
        "auto-specialist".to_string()
    } else if raw.starts_with("auto-") {
        raw
    } else {
        format!("auto-{raw}")
    };

    truncate_with_ellipsis(&base, 48)
        .trim_end_matches("...")
        .to_string()
}

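/// Returns `base` unchanged if it is not already taken, otherwise appends the
/// first free numeric suffix (`base-2`, `base-3`, ...).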
fn unique_relay_agent_name(base: &str, existing: &[String]) -> String {
    if !existing.iter().any(|name| name == base) {
        return base.to_string();
    }

    let mut suffix = 2usize;
    loop {
        let candidate = format!("{base}-{suffix}");
        if !existing.iter().any(|name| name == &candidate) {
            return candidate;
        }
        suffix += 1;
    }
}

fn relay_instruction_from_plan(name: &str, specialty: &str, mission: &str) -> String {
    format!(
        "You are @{name}.\n\
         Specialty: {specialty}.\n\
         Mission: {mission}\n\n\
         This is a protocol-first relay conversation. Treat incoming handoffs as authoritative context.\n\
         Keep responses concise, concrete, and useful for the next specialist.\n\
         Include one clear recommendation for what the next agent should do.\n\
         If the task is too large for the current team, explicitly call out missing specialties and handoff boundaries.",
    )
}

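/// Converts a model-planned profile into a runtime `RelayProfile`: fills in
/// fallback specialty/mission text, derives and de-duplicates the agent name,
/// and guarantees the `relay`, `context-handoff`, and `autochat` capabilities.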
fn build_runtime_profile_from_plan(
    profile: PlannedRelayProfile,
    existing: &[String],
) -> Option<RelayProfile> {
    let specialty = if profile.specialty.trim().is_empty() {
        "generalist".to_string()
    } else {
        profile.specialty.trim().to_string()
    };

    let mission = if profile.mission.trim().is_empty() {
        "Advance the relay with concrete next actions and clear handoffs.".to_string()
    } else {
        profile.mission.trim().to_string()
    };

    let base_name = if profile.name.trim().is_empty() {
        format!("auto-{}", slugify_label(&specialty))
    } else {
        profile.name.trim().to_string()
    };

    let sanitized = sanitize_relay_agent_name(&base_name);
    let name = unique_relay_agent_name(&sanitized, existing);
    if name.trim().is_empty() {
        return None;
    }

    let mut capabilities: Vec<String> = Vec::new();
    let specialty_cap = slugify_label(&specialty);
    if !specialty_cap.is_empty() {
        capabilities.push(specialty_cap);
    }

    for capability in profile.capabilities {
        let normalized = slugify_label(&capability);
        if !normalized.is_empty() && !capabilities.contains(&normalized) {
            capabilities.push(normalized);
        }
    }

    for required in ["relay", "context-handoff", "autochat"] {
        if !capabilities.iter().any(|capability| capability == required) {
            capabilities.push(required.to_string());
        }
    }

    Some(RelayProfile {
        name: name.clone(),
        instructions: relay_instruction_from_plan(&name, &specialty, &mission),
        capabilities,
    })
}

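/// Deserializes `T` from a model reply, tolerating prose or markdown fences
/// around the payload by retrying on the outermost `{...}` or `[...]` slice.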
fn extract_json_payload<T: DeserializeOwned>(text: &str) -> Option<T> {
    let trimmed = text.trim();
    if let Ok(value) = serde_json::from_str::<T>(trimmed) {
        return Some(value);
    }

    if let (Some(start), Some(end)) = (trimmed.find('{'), trimmed.rfind('}'))
        && start < end
        && let Ok(value) = serde_json::from_str::<T>(&trimmed[start..=end])
    {
        return Some(value);
    }

    if let (Some(start), Some(end)) = (trimmed.find('['), trimmed.rfind(']'))
        && start < end
        && let Ok(value) = serde_json::from_str::<T>(&trimmed[start..=end])
    {
        return Some(value);
    }

    None
}

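/// Resolves a provider for the planner/controller calls: honors an explicit
/// provider prefix when `parse_model_string` yields one, then walks a
/// preferred-provider fallback list, and finally falls back to the first
/// registered provider.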
fn resolve_provider_for_model_autochat(
    registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
    model_ref: &str,
) -> Option<(std::sync::Arc<dyn crate::provider::Provider>, String)> {
    let (provider_name, model_name) = crate::provider::parse_model_string(model_ref);
    if let Some(provider_name) = provider_name
        && let Some(provider) = registry.get(provider_name)
    {
        return Some((provider, model_name.to_string()));
    }

    let fallbacks = [
        "zai",
        "openai",
        "github-copilot",
        "anthropic",
        "openrouter",
        "novita",
        "moonshotai",
        "google",
    ];

    for provider_name in fallbacks {
        if let Some(provider) = registry.get(provider_name) {
            return Some((provider, model_ref.to_string()));
        }
    }

    registry
        .list()
        .first()
        .copied()
        .and_then(|name| registry.get(name))
        .map(|provider| (provider, model_ref.to_string()))
}

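/// Asks the model to design a task-specific relay team and maps the JSON reply
/// into runtime profiles. Returns `None` (callers fall back to generic
/// profiles) when the provider call or parsing fails, or when fewer than two
/// usable profiles come back.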
async fn plan_relay_profiles_with_registry(
    task: &str,
    model_ref: &str,
    requested_agents: usize,
    registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
) -> Option<Vec<RelayProfile>> {
    let (provider, model_name) = resolve_provider_for_model_autochat(registry, model_ref)?;
    let requested_agents = requested_agents.clamp(2, AUTOCHAT_MAX_AGENTS);

    let request = crate::provider::CompletionRequest {
        model: model_name,
        messages: vec![
            crate::provider::Message {
                role: crate::provider::Role::System,
                content: vec![crate::provider::ContentPart::Text {
                    text: "You are a relay-team architect. Return ONLY valid JSON.".to_string(),
                }],
            },
            crate::provider::Message {
                role: crate::provider::Role::User,
                content: vec![crate::provider::ContentPart::Text {
                    text: format!(
                        "Task:\n{task}\n\nDesign a task-specific relay team.\n\
                         Respond with JSON object only:\n\
                         {{\n \"profiles\": [\n {{\"name\":\"auto-...\",\"specialty\":\"...\",\"mission\":\"...\",\"capabilities\":[\"...\"]}}\n ]\n}}\n\
                         Requirements:\n\
                         - Return {} profiles\n\
                         - Names must be short kebab-case\n\
                         - Capabilities must be concise skill tags\n\
                         - Missions should be concrete and handoff-friendly",
                        requested_agents
                    ),
                }],
            },
        ],
        tools: Vec::new(),
        temperature: Some(1.0),
        top_p: Some(0.9),
        max_tokens: Some(1200),
        stop: Vec::new(),
    };

    let response = provider.complete(request).await.ok()?;
    let text = response
        .message
        .content
        .iter()
        .filter_map(|part| match part {
            crate::provider::ContentPart::Text { text }
            | crate::provider::ContentPart::Thinking { text } => Some(text.as_str()),
            _ => None,
        })
        .collect::<Vec<_>>()
        .join("\n");

    let planned = extract_json_payload::<PlannedRelayResponse>(&text)?;
    let mut existing = Vec::<String>::new();
    let mut runtime = Vec::<RelayProfile>::new();

    for profile in planned.profiles.into_iter().take(AUTOCHAT_MAX_AGENTS) {
        if let Some(runtime_profile) = build_runtime_profile_from_plan(profile, &existing) {
            existing.push(runtime_profile.name.clone());
            runtime.push(runtime_profile);
        }
    }

    if runtime.len() >= 2 {
        Some(runtime)
    } else {
        None
    }
}

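/// Asks the model whether the relay needs one more specialist right now based
/// on the latest handoff. Returns the new profile plus the stated reason, or
/// `None` when no spawn is requested or the reply cannot be used.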
async fn decide_dynamic_spawn_with_registry(
    task: &str,
    model_ref: &str,
    latest_output: &str,
    round: usize,
    ordered_agents: &[String],
    registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
) -> Option<(RelayProfile, String)> {
    let (provider, model_name) = resolve_provider_for_model_autochat(registry, model_ref)?;
    let team = ordered_agents
        .iter()
        .map(|name| format!("@{name}"))
        .collect::<Vec<_>>()
        .join(", ");
    let output_excerpt = truncate_with_ellipsis(latest_output, 2200);

    let request = crate::provider::CompletionRequest {
        model: model_name,
        messages: vec![
            crate::provider::Message {
                role: crate::provider::Role::System,
                content: vec![crate::provider::ContentPart::Text {
                    text: "You are a relay scaling controller. Return ONLY valid JSON.".to_string(),
                }],
            },
            crate::provider::Message {
                role: crate::provider::Role::User,
                content: vec![crate::provider::ContentPart::Text {
                    text: format!(
                        "Task:\n{task}\n\nRound: {round}\nCurrent team: {team}\n\
                         Latest handoff excerpt:\n{output_excerpt}\n\n\
                         Decide whether the team needs one additional specialist right now.\n\
                         Respond with JSON object only:\n\
                         {{\n \"spawn\": true|false,\n \"reason\": \"...\",\n \"profile\": {{\"name\":\"auto-...\",\"specialty\":\"...\",\"mission\":\"...\",\"capabilities\":[\"...\"]}}\n}}\n\
                         If spawn=false, profile may be null or omitted."
                    ),
                }],
            },
        ],
        tools: Vec::new(),
        temperature: Some(1.0),
        top_p: Some(0.9),
        max_tokens: Some(420),
        stop: Vec::new(),
    };

    let response = provider.complete(request).await.ok()?;
    let text = response
        .message
        .content
        .iter()
        .filter_map(|part| match part {
            crate::provider::ContentPart::Text { text }
            | crate::provider::ContentPart::Thinking { text } => Some(text.as_str()),
            _ => None,
        })
        .collect::<Vec<_>>()
        .join("\n");

    let decision = extract_json_payload::<RelaySpawnDecision>(&text)?;
    if !decision.spawn {
        return None;
    }

    let profile = decision.profile?;
    let runtime_profile = build_runtime_profile_from_plan(profile, ordered_agents)?;
    let reason = if decision.reason.trim().is_empty() {
        "Model requested additional specialist for task scope.".to_string()
    } else {
        decision.reason.trim().to_string()
    };

    Some((runtime_profile, reason))
}

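/// CLI entry point for `run`: routes `/autochat` (and the `/go` and `/team`
/// shortcuts) into the protocol-first relay, otherwise resolves or creates a
/// session and forwards the message to it.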
pub async fn execute(args: RunArgs) -> Result<()> {
    let message = args.message.trim();

    if message.is_empty() {
        anyhow::bail!("You must provide a message");
    }

    tracing::info!("Running with message: {}", message);

    let config = Config::load().await.unwrap_or_default();

    let easy_go_requested = is_easy_go_command(message);
    let normalized = normalize_cli_go_command(message);
    if let Some(rest) = command_with_optional_args(&normalized, "/autochat") {
        let Some((agent_count, task)) = parse_autochat_args(rest) else {
            anyhow::bail!(
                "Usage: /autochat [count] <task>\nEasy mode: /go <task>\ncount range: 2-{} (default: {})",
                AUTOCHAT_MAX_AGENTS,
                AUTOCHAT_DEFAULT_AGENTS
            );
        };

        if !(2..=AUTOCHAT_MAX_AGENTS).contains(&agent_count) {
            anyhow::bail!(
                "Invalid relay size {}. count must be between 2 and {}",
                agent_count,
                AUTOCHAT_MAX_AGENTS
            );
        }

        let model = resolve_autochat_model(
            args.model.as_deref(),
            std::env::var("CODETETHER_DEFAULT_MODEL").ok().as_deref(),
            config.default_model.as_deref(),
            easy_go_requested,
        );

        let relay_result = run_protocol_first_relay(agent_count, task, &model).await?;
        match args.format.as_str() {
            "json" => println!("{}", serde_json::to_string_pretty(&relay_result)?),
            _ => {
                println!("{}", relay_result.summary);
                if let Some(failure) = &relay_result.failure {
                    eprintln!("\nFailure detail: {}", failure);
                }
                eprintln!(
                    "\n[Relay: {} | Model: {}]",
                    relay_result.relay_id, relay_result.model
                );
            }
        }
        return Ok(());
    }

    let mut session = if let Some(session_id) = args.session.clone() {
        tracing::info!("Continuing session: {}", session_id);
        if let Some(oc_id) = session_id.strip_prefix("opencode_") {
            if let Some(storage) = crate::opencode::OpenCodeStorage::new() {
                Session::from_opencode(oc_id, &storage).await?
            } else {
                anyhow::bail!("OpenCode storage not available")
            }
        } else {
            Session::load(&session_id).await?
        }
    } else if args.continue_session {
        let workspace_dir = std::env::current_dir().unwrap_or_default();
        match Session::last_for_directory(Some(&workspace_dir)).await {
            Ok(s) => {
                tracing::info!(
                    session_id = %s.id,
                    workspace = %workspace_dir.display(),
                    "Continuing last workspace session"
                );
                s
            }
            Err(_) => {
                match Session::last_opencode_for_directory(&workspace_dir).await {
                    Ok(s) => {
                        tracing::info!(
                            session_id = %s.id,
                            workspace = %workspace_dir.display(),
                            "Resuming from OpenCode session"
                        );
                        s
                    }
                    Err(_) => {
                        let s = Session::new().await?;
                        tracing::info!(
                            session_id = %s.id,
                            workspace = %workspace_dir.display(),
                            "No workspace session found; created new session"
                        );
                        s
                    }
                }
            }
        }
    } else {
        let s = Session::new().await?;
        tracing::info!("Created new session: {}", s.id);
        s
    };

    let model = args
        .model
        .or_else(|| std::env::var("CODETETHER_DEFAULT_MODEL").ok())
        .or(config.default_model);

    if let Some(model) = model {
        tracing::info!("Using model: {}", model);
        session.metadata.model = Some(model);
    }

    let result = session.prompt(message).await?;

    match args.format.as_str() {
        "json" => {
            println!("{}", serde_json::to_string_pretty(&result)?);
        }
        _ => {
            println!("{}", result.text);
            eprintln!(
                "\n[Session: {} | Continue with: codetether run -c \"...\"]",
                session.id
            );
        }
    }

    Ok(())
}

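/// Strips `command` from the front of `input` and returns the trimmed
/// remainder; returns `None` when the prefix does not match as a whole word
/// (so "/autochatty" is not treated as "/autochat").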
fn command_with_optional_args<'a>(input: &'a str, command: &str) -> Option<&'a str> {
    let trimmed = input.trim();
    let rest = trimmed.strip_prefix(command)?;

    if rest.is_empty() {
        return Some("");
    }

    let first = rest.chars().next()?;
    if first.is_whitespace() {
        Some(rest.trim())
    } else {
        None
    }
}

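/// Rewrites the `/go` and `/team` shortcuts into an equivalent `/autochat`
/// command, filling in the default agent count and demo task when the user
/// omits them. Any other input is returned trimmed but otherwise untouched.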
fn normalize_cli_go_command(input: &str) -> String {
    let trimmed = input.trim();
    if trimmed.is_empty() || !trimmed.starts_with('/') {
        return trimmed.to_string();
    }

    let mut parts = trimmed.splitn(2, char::is_whitespace);
    let command = parts.next().unwrap_or("");
    let args = parts.next().unwrap_or("").trim();

    match command.to_ascii_lowercase().as_str() {
        "/go" | "/team" => {
            if args.is_empty() {
                format!(
                    "/autochat {} {}",
                    AUTOCHAT_DEFAULT_AGENTS, AUTOCHAT_QUICK_DEMO_TASK
                )
            } else {
                let mut count_and_task = args.splitn(2, char::is_whitespace);
                let first = count_and_task.next().unwrap_or("");
                if let Ok(count) = first.parse::<usize>() {
                    let task = count_and_task.next().unwrap_or("").trim();
                    if task.is_empty() {
                        format!("/autochat {count} {AUTOCHAT_QUICK_DEMO_TASK}")
                    } else {
                        format!("/autochat {count} {task}")
                    }
                } else {
                    format!("/autochat {} {args}", AUTOCHAT_DEFAULT_AGENTS)
                }
            }
        }
        _ => trimmed.to_string(),
    }
}

fn is_easy_go_command(input: &str) -> bool {
    let command = input
        .trim_start()
        .split_whitespace()
        .next()
        .unwrap_or("")
        .to_ascii_lowercase();

    matches!(command.as_str(), "/go" | "/team")
}

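/// Parses `/autochat` arguments into `(agent_count, task)`. A leading integer
/// is taken as the count (the remainder, or the demo task, becomes the task);
/// otherwise the whole string is the task and the default count applies.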
fn parse_autochat_args(rest: &str) -> Option<(usize, &str)> {
    let rest = rest.trim();
    if rest.is_empty() {
        return None;
    }

    let mut parts = rest.splitn(2, char::is_whitespace);
    let first = parts.next().unwrap_or("").trim();
    if first.is_empty() {
        return None;
    }

    if let Ok(count) = first.parse::<usize>() {
        let task = parts.next().unwrap_or("").trim();
        if task.is_empty() {
            Some((count, AUTOCHAT_QUICK_DEMO_TASK))
        } else {
            Some((count, task))
        }
    } else {
        Some((AUTOCHAT_DEFAULT_AGENTS, rest))
    }
}

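/// Picks the relay model: an explicit CLI `--model` value always wins, `/go`
/// and `/team` then default to `GO_DEFAULT_MODEL`, followed by the environment
/// variable, the config default, and finally a hard-coded `zai/glm-5` fallback.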
fn resolve_autochat_model(
    cli_model: Option<&str>,
    env_model: Option<&str>,
    config_model: Option<&str>,
    easy_go_requested: bool,
) -> String {
    if let Some(model) = cli_model.filter(|value| !value.trim().is_empty()) {
        return model.to_string();
    }
    if easy_go_requested {
        return GO_DEFAULT_MODEL.to_string();
    }
    if let Some(model) = env_model.filter(|value| !value.trim().is_empty()) {
        return model.to_string();
    }
    if let Some(model) = config_model.filter(|value| !value.trim().is_empty()) {
        return model.to_string();
    }
    "zai/glm-5".to_string()
}

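/// Builds the fallback team of generic, self-organizing agents
/// (`auto-agent-1`, `auto-agent-2`, ...) used when model-driven planning is
/// unavailable or fails.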
fn build_relay_profiles(count: usize) -> Vec<RelayProfile> {
    let mut profiles = Vec::with_capacity(count);
    for idx in 0..count {
        let name = format!("auto-agent-{}", idx + 1);

        let instructions = format!(
            "You are @{name}.\n\
             Role policy: self-organize from task context and current handoff instead of assuming a fixed persona.\n\
             Mission: advance the relay with concrete, high-signal next actions and clear ownership boundaries.\n\n\
             This is a protocol-first relay conversation. Treat the incoming handoff as authoritative context.\n\
             Keep your response concise, concrete, and useful for the next specialist.\n\
             Include one clear recommendation for what the next agent should do.\n\
             If the task scope is too large, explicitly call out missing specialties and handoff boundaries.",
        );
        let capabilities = vec![
            "generalist".to_string(),
            "self-organizing".to_string(),
            "relay".to_string(),
            "context-handoff".to_string(),
            "autochat".to_string(),
        ];

        profiles.push(RelayProfile {
            name,
            instructions,
            capabilities,
        });
    }
    profiles
}

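/// Truncates `value` to at most `max_chars` characters (counted as `char`s,
/// not bytes) and appends "..." only when something was actually cut off.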
fn truncate_with_ellipsis(value: &str, max_chars: usize) -> String {
    if max_chars == 0 {
        return String::new();
    }

    let mut chars = value.chars();
    let mut output = String::new();
    for _ in 0..max_chars {
        if let Some(ch) = chars.next() {
            output.push(ch);
        } else {
            return value.to_string();
        }
    }

    if chars.next().is_some() {
        format!("{output}...")
    } else {
        output
    }
}

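/// Reduces a handoff to a short, lowercase alphanumeric fingerprint (capped at
/// about 280 characters) so consecutive near-identical responses can be
/// detected as convergence.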
fn normalize_for_convergence(text: &str) -> String {
    let mut normalized = String::with_capacity(text.len().min(512));
    let mut last_was_space = false;

    for ch in text.chars() {
        if ch.is_ascii_alphanumeric() {
            normalized.push(ch.to_ascii_lowercase());
            last_was_space = false;
        } else if ch.is_whitespace() && !last_was_space {
            normalized.push(' ');
            last_was_space = true;
        }

        if normalized.len() >= 280 {
            break;
        }
    }

    normalized.trim().to_string()
}

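/// Runs the protocol-first relay: plans (or falls back to) a team of agent
/// profiles, gives each its own session, passes the handoff baton around the
/// ring for up to `AUTOCHAT_MAX_ROUNDS` rounds, optionally spawns extra
/// specialists mid-relay, and stops early when consecutive handoffs stop
/// changing.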
async fn run_protocol_first_relay(
    agent_count: usize,
    task: &str,
    model_ref: &str,
) -> Result<AutochatCliResult> {
    let bus = AgentBus::new().into_arc();
    let relay = ProtocolRelayRuntime::new(bus);

    let registry = crate::provider::ProviderRegistry::from_vault()
        .await
        .ok()
        .map(std::sync::Arc::new);

    let mut planner_used = false;
    let profiles = if let Some(registry) = &registry {
        if let Some(planned) =
            plan_relay_profiles_with_registry(task, model_ref, agent_count, registry).await
        {
            planner_used = true;
            planned
        } else {
            build_relay_profiles(agent_count)
        }
    } else {
        build_relay_profiles(agent_count)
    };

    let relay_profiles: Vec<RelayAgentProfile> = profiles
        .iter()
        .map(|profile| RelayAgentProfile {
            name: profile.name.clone(),
            capabilities: profile.capabilities.clone(),
        })
        .collect();

    let mut ordered_agents: Vec<String> = profiles
        .iter()
        .map(|profile| profile.name.clone())
        .collect();
    let mut sessions: HashMap<String, Session> = HashMap::new();

    for profile in &profiles {
        let mut session = Session::new().await?;
        session.metadata.model = Some(model_ref.to_string());
        session.agent = profile.name.clone();
        session.add_message(Message {
            role: Role::System,
            content: vec![ContentPart::Text {
                text: profile.instructions.clone(),
            }],
        });
        sessions.insert(profile.name.clone(), session);
    }

    if ordered_agents.len() < 2 {
        anyhow::bail!("Autochat needs at least 2 agents to relay.");
    }

    relay.register_agents(&relay_profiles);

    let mut baton = format!(
        "Task:\n{task}\n\nStart by proposing an execution strategy and one immediate next step."
    );
    let mut previous_normalized: Option<String> = None;
    let mut convergence_hits = 0usize;
    let mut turns = 0usize;
    let mut dynamic_spawn_count = 0usize;
    let mut status = "max_rounds_reached".to_string();
    let mut failure_note: Option<String> = None;

    'relay_loop: for round in 1..=AUTOCHAT_MAX_ROUNDS {
        let mut idx = 0usize;
        while idx < ordered_agents.len() {
            let to = ordered_agents[idx].clone();
            let from = if idx == 0 {
                if round == 1 {
                    "user".to_string()
                } else {
                    ordered_agents[ordered_agents.len() - 1].clone()
                }
            } else {
                ordered_agents[idx - 1].clone()
            };

            turns += 1;
            relay.send_handoff(&from, &to, &baton);

            let Some(mut session) = sessions.remove(&to) else {
                status = "agent_error".to_string();
                failure_note = Some(format!("Relay agent @{to} session was unavailable."));
                break 'relay_loop;
            };

            let output = match session.prompt(&baton).await {
                Ok(response) => response.text,
                Err(err) => {
                    status = "agent_error".to_string();
                    failure_note = Some(format!("Relay agent @{to} failed: {err}"));
                    sessions.insert(to, session);
                    break 'relay_loop;
                }
            };

            sessions.insert(to.clone(), session);

            let normalized = normalize_for_convergence(&output);
            if previous_normalized.as_deref() == Some(normalized.as_str()) {
                convergence_hits += 1;
            } else {
                convergence_hits = 0;
            }
            previous_normalized = Some(normalized);

            baton = format!(
                "Relay task:\n{task}\n\nIncoming handoff from @{to}:\n{}\n\nContinue the work from this handoff. Keep your response focused and provide one concrete next-step instruction for the next agent.",
                truncate_with_ellipsis(&output, 3_500)
            );

            let can_attempt_spawn = dynamic_spawn_count < AUTOCHAT_MAX_DYNAMIC_SPAWNS
                && ordered_agents.len() < AUTOCHAT_MAX_AGENTS
                && output.len() >= AUTOCHAT_SPAWN_CHECK_MIN_CHARS;

            if can_attempt_spawn
                && let Some(registry) = &registry
                && let Some((profile, reason)) = decide_dynamic_spawn_with_registry(
                    task,
                    model_ref,
                    &output,
                    round,
                    &ordered_agents,
                    registry,
                )
                .await
            {
                match Session::new().await {
                    Ok(mut spawned_session) => {
                        spawned_session.metadata.model = Some(model_ref.to_string());
                        spawned_session.agent = profile.name.clone();
                        spawned_session.add_message(Message {
                            role: Role::System,
                            content: vec![ContentPart::Text {
                                text: profile.instructions.clone(),
                            }],
                        });

                        relay.register_agents(&[RelayAgentProfile {
                            name: profile.name.clone(),
                            capabilities: profile.capabilities.clone(),
                        }]);

                        ordered_agents.insert(idx + 1, profile.name.clone());
                        sessions.insert(profile.name.clone(), spawned_session);
                        dynamic_spawn_count += 1;

                        tracing::info!(
                            agent = %profile.name,
                            reason = %reason,
                            "Dynamic relay spawn accepted"
                        );
                    }
                    Err(err) => {
                        tracing::warn!(
                            agent = %profile.name,
                            error = %err,
                            "Dynamic relay spawn requested but failed"
                        );
                    }
                }
            }

            if convergence_hits >= 2 {
                status = "converged".to_string();
                break 'relay_loop;
            }

            idx += 1;
        }
    }

    relay.shutdown_agents(&ordered_agents);

    let mut summary = format!(
        "Autochat complete ({status}) — relay {} with {} agents over {} turns.\n\nFinal relay handoff:\n{}",
        relay.relay_id(),
        ordered_agents.len(),
        turns,
        truncate_with_ellipsis(&baton, 4_000)
    );
    if let Some(note) = &failure_note {
        summary.push_str(&format!("\n\nFailure detail: {note}"));
    }
    if planner_used {
        summary.push_str("\n\nTeam planning: model-organized profiles.");
    } else {
        summary.push_str("\n\nTeam planning: fallback self-organizing profiles.");
    }
    if dynamic_spawn_count > 0 {
        summary.push_str(&format!("\nDynamic relay spawns: {dynamic_spawn_count}"));
    }

    Ok(AutochatCliResult {
        status,
        relay_id: relay.relay_id().to_string(),
        model: model_ref.to_string(),
        agent_count: ordered_agents.len(),
        turns,
        agents: ordered_agents,
        final_handoff: baton,
        summary,
        failure: failure_note,
    })
}

#[cfg(test)]
mod tests {
    use super::PlannedRelayProfile;
    use super::{
        AUTOCHAT_QUICK_DEMO_TASK, PlannedRelayResponse, build_runtime_profile_from_plan,
        command_with_optional_args, extract_json_payload, is_easy_go_command,
        normalize_cli_go_command, parse_autochat_args, resolve_autochat_model,
    };

    #[test]
    fn normalize_go_maps_to_autochat_with_count_and_task() {
        assert_eq!(
            normalize_cli_go_command("/go 4 build protocol relay"),
            "/autochat 4 build protocol relay"
        );
    }

    #[test]
    fn normalize_go_count_only_uses_demo_task() {
        assert_eq!(
            normalize_cli_go_command("/go 4"),
            format!("/autochat 4 {AUTOCHAT_QUICK_DEMO_TASK}")
        );
    }

    #[test]
    fn parse_autochat_args_supports_default_count() {
        assert_eq!(
            parse_autochat_args("build a relay").expect("valid args"),
            (3, "build a relay")
        );
    }

    #[test]
    fn parse_autochat_args_supports_explicit_count() {
        assert_eq!(
            parse_autochat_args("4 build a relay").expect("valid args"),
            (4, "build a relay")
        );
    }

    #[test]
    fn command_with_optional_args_avoids_prefix_collision() {
        assert_eq!(command_with_optional_args("/autochatty", "/autochat"), None);
    }

    #[test]
    fn easy_go_detection_handles_aliases() {
        assert!(is_easy_go_command("/go 4 task"));
        assert!(is_easy_go_command("/team 4 task"));
        assert!(!is_easy_go_command("/autochat 4 task"));
    }

    #[test]
    fn easy_go_defaults_to_minimax_when_model_not_set() {
        assert_eq!(
            resolve_autochat_model(None, None, Some("zai/glm-5"), true),
            "minimax/MiniMax-M2.5"
        );
    }

    #[test]
    fn explicit_model_wins_over_easy_go_default() {
        assert_eq!(
            resolve_autochat_model(Some("zai/glm-5"), None, None, true),
            "zai/glm-5"
        );
    }

    #[test]
    fn extract_json_payload_parses_markdown_wrapped_json() {
        let wrapped = "Here is the plan:\n```json\n{\"profiles\":[{\"name\":\"auto-db\",\"specialty\":\"database\",\"mission\":\"Own schema and queries\",\"capabilities\":[\"sql\",\"indexing\"]}]}\n```";
        let parsed: PlannedRelayResponse =
            extract_json_payload(wrapped).expect("should parse wrapped JSON");
        assert_eq!(parsed.profiles.len(), 1);
        assert_eq!(parsed.profiles[0].name, "auto-db");
    }

    #[test]
    fn build_runtime_profile_normalizes_and_deduplicates_name() {
        let planned = PlannedRelayProfile {
            name: "Data Specialist".to_string(),
            specialty: "data engineering".to_string(),
            mission: "Prepare datasets for downstream coding".to_string(),
            capabilities: vec!["ETL".to_string(), "sql".to_string()],
        };

        let profile =
            build_runtime_profile_from_plan(planned, &["auto-data-specialist".to_string()])
                .expect("profile should be built");

        assert_eq!(profile.name, "auto-data-specialist-2");
        assert!(profile.capabilities.iter().any(|cap| cap == "relay"));
        assert!(profile.capabilities.iter().any(|cap| cap == "autochat"));
    }
}