1use super::RunArgs;
4use crate::autochat::model_rotation::{RelayModelRotation, build_round_robin_model_rotation};
5use crate::autochat::shared_context::{
6 SharedRelayContext, compose_prompt_with_context, distill_context_delta_with_rlm,
7 drain_context_updates, publish_context_delta,
8};
9use crate::autochat::transport::{attach_handoff_receiver, consume_handoff_by_correlation};
10use crate::bus::{AgentBus, relay::ProtocolRelayRuntime, relay::RelayAgentProfile};
11use crate::config::Config;
12use crate::okr::{ApprovalDecision, KeyResult, Okr, OkrRepository, OkrRun};
13use crate::provider::{ContentPart, Message, Role};
14use crate::rlm::{FinalPayload, RlmExecutor};
15use crate::session::Session;
16use crate::session::import_codex_session_by_id;
17use anyhow::Result;
18use serde::{Deserialize, Serialize, de::DeserializeOwned};
19use std::collections::HashMap;
20use std::io::Write;
21use uuid::Uuid;
22
// Relay limits and defaults, mirrored from `crate::autochat` so this module
// can reference them without repeating the full path everywhere.
const AUTOCHAT_MAX_AGENTS: usize = crate::autochat::AUTOCHAT_MAX_AGENTS;
const AUTOCHAT_DEFAULT_AGENTS: usize = crate::autochat::AUTOCHAT_DEFAULT_AGENTS;
const AUTOCHAT_MAX_ROUNDS: usize = crate::autochat::AUTOCHAT_MAX_ROUNDS;
const AUTOCHAT_MAX_DYNAMIC_SPAWNS: usize = crate::autochat::AUTOCHAT_MAX_DYNAMIC_SPAWNS;
const AUTOCHAT_SPAWN_CHECK_MIN_CHARS: usize = crate::autochat::AUTOCHAT_SPAWN_CHECK_MIN_CHARS;
const AUTOCHAT_QUICK_DEMO_TASK: &str = crate::autochat::AUTOCHAT_QUICK_DEMO_TASK;
// RLM handoff-distillation tuning knobs (threshold before RLM kicks in,
// fallback truncation size, and the query used to distill handoffs).
const AUTOCHAT_RLM_THRESHOLD_CHARS: usize = crate::autochat::AUTOCHAT_RLM_THRESHOLD_CHARS;
const AUTOCHAT_RLM_FALLBACK_CHARS: usize = crate::autochat::AUTOCHAT_RLM_FALLBACK_CHARS;
const AUTOCHAT_RLM_HANDOFF_QUERY: &str = crate::autochat::AUTOCHAT_RLM_HANDOFF_QUERY;
// Model pinned by `/go` easy mode when the CLI flag does not specify one.
const GO_DEFAULT_MODEL: &str = "minimax-credits/MiniMax-M2.5-highspeed";
35
36fn parse_uuid_guarded(s: &str, context: &str) -> Option<Uuid> {
39 match s.parse::<Uuid>() {
40 Ok(uuid) => Some(uuid),
41 Err(e) => {
42 tracing::warn!(
43 context,
44 uuid_str = %s,
45 error = %e,
46 "Invalid UUID string - skipping operation"
47 );
48 None
49 }
50 }
51}
52
/// Runtime definition of one relay agent: its unique handle, the system
/// instructions it runs with, and its capability tags.
#[derive(Debug, Clone)]
struct RelayProfile {
    /// Unique agent handle (e.g. `auto-backend`).
    name: String,
    /// System instructions injected into the agent's session.
    instructions: String,
    /// Slugified skill tags used for relay routing.
    capabilities: Vec<String>,
}
59
/// Agent profile as proposed by the planner model. Every field defaults so a
/// partially filled JSON object still deserializes; blanks are repaired later
/// by `build_runtime_profile_from_plan`.
#[derive(Debug, Clone, Deserialize)]
struct PlannedRelayProfile {
    #[serde(default)]
    name: String,
    #[serde(default)]
    specialty: String,
    #[serde(default)]
    mission: String,
    #[serde(default)]
    capabilities: Vec<String>,
}
71
/// Top-level JSON envelope returned by the team-planning prompt.
#[derive(Debug, Clone, Deserialize)]
struct PlannedRelayResponse {
    /// Profiles proposed by the model; empty when the key is absent.
    #[serde(default)]
    profiles: Vec<PlannedRelayProfile>,
}
77
/// Model verdict on whether to add one more specialist mid-relay.
#[derive(Debug, Clone, Deserialize)]
struct RelaySpawnDecision {
    /// True when the model wants an additional agent spawned.
    #[serde(default)]
    spawn: bool,
    /// Model-supplied justification; may be blank.
    #[serde(default)]
    reason: String,
    /// Proposed profile; may be null/omitted when `spawn` is false.
    #[serde(default)]
    profile: Option<PlannedRelayProfile>,
}
87
/// Single key result inside a model-drafted OKR.
#[derive(Debug, Clone, Deserialize)]
struct PlannedOkrKeyResult {
    #[serde(default)]
    title: String,
    /// Numeric target; defaults to 0.0 when absent.
    #[serde(default)]
    target_value: f64,
    /// Unit of measure; defaults to "%" via `default_okr_unit`.
    #[serde(default = "default_okr_unit")]
    unit: String,
}
97
/// OKR draft as returned by the planning model; converted into a real `Okr`
/// by `okr_from_planned_draft`.
#[derive(Debug, Clone, Deserialize)]
struct PlannedOkrDraft {
    #[serde(default)]
    title: String,
    #[serde(default)]
    description: String,
    #[serde(default)]
    key_results: Vec<PlannedOkrKeyResult>,
}
107
/// Serde default for a key-result unit: percent.
fn default_okr_unit() -> String {
    String::from("%")
}
111
/// JSON-serializable summary of a protocol-first relay run, printed by the
/// CLI when `--format json` is selected. Field values are assigned by
/// `run_protocol_first_relay`; notes below reflect how this file uses them.
#[derive(Debug, Serialize)]
struct AutochatCliResult {
    // Outcome label for the run (values set by the relay runner).
    status: String,
    // Relay identifier, printed in the text-mode footer.
    relay_id: String,
    // Model reference the relay ran with, printed alongside relay_id.
    model: String,
    // Number of agents in the relay (presumably final team size — confirm in runner).
    agent_count: usize,
    // Turn count (presumably total relay turns executed — confirm in runner).
    turns: usize,
    // Agent names, in relay order.
    agents: Vec<String>,
    // Text of the final handoff produced by the relay.
    final_handoff: String,
    // Human-readable summary, printed in text mode.
    summary: String,
    // Failure detail; printed to stderr when present.
    failure: Option<String>,
    // Count of shared-context items accumulated during the run.
    shared_context_items: usize,
    // Count of RLM-distilled contexts observed during the run.
    rlm_context_count: usize,
}
126
/// Lowercases `value` and collapses every run of non-alphanumeric characters
/// into a single `-`, trimming any dashes left at either end.
fn slugify_label(value: &str) -> String {
    let mut slug = String::with_capacity(value.len());

    for ch in value.chars().map(|c| c.to_ascii_lowercase()) {
        if ch.is_ascii_alphanumeric() {
            slug.push(ch);
        } else if !slug.ends_with('-') {
            // Collapse consecutive separators into a single dash.
            slug.push('-');
        }
    }

    slug.trim_matches('-').to_string()
}
144
145fn sanitize_relay_agent_name(value: &str) -> String {
146 let raw = slugify_label(value);
147 let base = if raw.is_empty() {
148 "auto-specialist".to_string()
149 } else if raw.starts_with("auto-") {
150 raw
151 } else {
152 format!("auto-{raw}")
153 };
154
155 truncate_with_ellipsis(&base, 48)
156 .trim_end_matches("...")
157 .to_string()
158}
159
/// Returns `base` unchanged when it is not already taken; otherwise appends
/// the first numeric suffix (`-2`, `-3`, ...) that yields an unused name.
fn unique_relay_agent_name(base: &str, existing: &[String]) -> String {
    let taken = |candidate: &str| existing.iter().any(|name| name == candidate);

    if !taken(base) {
        return base.to_string();
    }

    (2usize..)
        .map(|suffix| format!("{base}-{suffix}"))
        .find(|candidate| !taken(candidate))
        .expect("an unused numeric suffix always exists")
}
174
/// Builds the system instructions for a planner-designed relay agent,
/// embedding its handle, specialty, and mission ahead of the shared
/// relay-protocol boilerplate.
fn relay_instruction_from_plan(name: &str, specialty: &str, mission: &str) -> String {
    let identity = format!("You are @{name}.\nSpecialty: {specialty}.\nMission: {mission}");
    let protocol = "This is a protocol-first relay conversation. Treat incoming handoffs as authoritative context.\n\
                    Keep responses concise, concrete, and useful for the next specialist.\n\
                    Include one clear recommendation for what the next agent should do.\n\
                    If the task is too large for the current team, explicitly call out missing specialties and handoff boundaries.";
    format!("{identity}\n\n{protocol}")
}
186
187fn build_runtime_profile_from_plan(
188 profile: PlannedRelayProfile,
189 existing: &[String],
190) -> Option<RelayProfile> {
191 let specialty = if profile.specialty.trim().is_empty() {
192 "generalist".to_string()
193 } else {
194 profile.specialty.trim().to_string()
195 };
196
197 let mission = if profile.mission.trim().is_empty() {
198 "Advance the relay with concrete next actions and clear handoffs.".to_string()
199 } else {
200 profile.mission.trim().to_string()
201 };
202
203 let base_name = if profile.name.trim().is_empty() {
204 format!("auto-{}", slugify_label(&specialty))
205 } else {
206 profile.name.trim().to_string()
207 };
208
209 let sanitized = sanitize_relay_agent_name(&base_name);
210 let name = unique_relay_agent_name(&sanitized, existing);
211 if name.trim().is_empty() {
212 return None;
213 }
214
215 let mut capabilities: Vec<String> = Vec::new();
216 let specialty_cap = slugify_label(&specialty);
217 if !specialty_cap.is_empty() {
218 capabilities.push(specialty_cap);
219 }
220
221 for capability in profile.capabilities {
222 let normalized = slugify_label(&capability);
223 if !normalized.is_empty() && !capabilities.contains(&normalized) {
224 capabilities.push(normalized);
225 }
226 }
227
228 crate::autochat::ensure_required_relay_capabilities(&mut capabilities);
229
230 Some(RelayProfile {
231 name: name.clone(),
232 instructions: relay_instruction_from_plan(&name, &specialty, &mission),
233 capabilities,
234 })
235}
236
237fn extract_json_payload<T: DeserializeOwned>(text: &str) -> Option<T> {
238 let trimmed = text.trim();
239 if let Ok(value) = serde_json::from_str::<T>(trimmed) {
240 return Some(value);
241 }
242
243 if let (Some(start), Some(end)) = (trimmed.find('{'), trimmed.rfind('}'))
244 && start < end
245 && let Ok(value) = serde_json::from_str::<T>(&trimmed[start..=end])
246 {
247 return Some(value);
248 }
249
250 if let (Some(start), Some(end)) = (trimmed.find('['), trimmed.rfind(']'))
251 && start < end
252 && let Ok(value) = serde_json::from_str::<T>(&trimmed[start..=end])
253 {
254 return Some(value);
255 }
256
257 None
258}
259
/// Thin wrapper around the shared model-rotation resolver: returns the
/// provider and concrete model name for `model_ref`, or `None` when no
/// configured provider can serve it.
fn resolve_provider_for_model_autochat(
    registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
    model_ref: &str,
) -> Option<(std::sync::Arc<dyn crate::provider::Provider>, String)> {
    crate::autochat::model_rotation::resolve_provider_for_model_autochat(registry, model_ref)
}
266
267fn default_relay_okr_template(okr_id: Uuid, task: &str) -> Okr {
268 let mut okr = Okr::new(
269 format!("Relay: {}", truncate_with_ellipsis(task, 60)),
270 format!("Execute relay task: {}", task),
271 );
272 okr.id = okr_id;
273
274 okr.add_key_result(KeyResult::new(
276 okr_id,
277 "Relay completes all rounds",
278 100.0,
279 "%",
280 ));
281 okr.add_key_result(KeyResult::new(
282 okr_id,
283 "Team produces actionable handoff",
284 1.0,
285 "count",
286 ));
287 okr.add_key_result(KeyResult::new(okr_id, "No critical errors", 0.0, "count"));
288 okr
289}
290
291fn okr_from_planned_draft(okr_id: Uuid, task: &str, planned: PlannedOkrDraft) -> Okr {
292 let title = if planned.title.trim().is_empty() {
293 format!("Relay: {}", truncate_with_ellipsis(task, 60))
294 } else {
295 planned.title.trim().to_string()
296 };
297
298 let description = if planned.description.trim().is_empty() {
299 format!("Execute relay task: {}", task)
300 } else {
301 planned.description.trim().to_string()
302 };
303
304 let mut okr = Okr::new(title, description);
305 okr.id = okr_id;
306
307 for kr in planned.key_results.into_iter().take(7) {
309 if kr.title.trim().is_empty() {
310 continue;
311 }
312 let unit = if kr.unit.trim().is_empty() {
313 default_okr_unit()
314 } else {
315 kr.unit
316 };
317 okr.add_key_result(KeyResult::new(
318 okr_id,
319 kr.title.trim().to_string(),
320 kr.target_value.max(0.0),
321 unit,
322 ));
323 }
324
325 if okr.key_results.is_empty() {
326 default_relay_okr_template(okr_id, task)
327 } else {
328 okr
329 }
330}
331
/// Asks the model referenced by `model_ref` to draft an OKR for `task`.
///
/// Returns `None` when no provider resolves, the completion call fails, or
/// the response cannot be parsed as [`PlannedOkrDraft`] JSON — failures are
/// logged, never propagated, so the caller can fall back to the template.
async fn plan_okr_draft_with_registry(
    task: &str,
    model_ref: &str,
    agent_count: usize,
    registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
) -> Option<PlannedOkrDraft> {
    let (provider, model_name) = resolve_provider_for_model_autochat(registry, model_ref)?;
    let model_name_for_log = model_name.clone();

    let request = crate::provider::CompletionRequest {
        model: model_name,
        messages: vec![
            crate::provider::Message {
                role: crate::provider::Role::System,
                content: vec![crate::provider::ContentPart::Text {
                    text: "You write OKRs for execution governance. Return ONLY valid JSON."
                        .to_string(),
                }],
            },
            crate::provider::Message {
                role: crate::provider::Role::User,
                content: vec![crate::provider::ContentPart::Text {
                    text: format!(
                        "Task:\n{task}\n\nTeam size: {agent_count}\n\n\
                         Propose ONE objective and 3-7 measurable key results for executing this task via an AI relay.\n\
                         Key results must be quantitative (numeric target_value + unit).\n\n\
                         Return JSON ONLY (no markdown):\n\
                         {{\n \"title\": \"...\",\n \"description\": \"...\",\n \"key_results\": [\n {{\"title\":\"...\",\"target_value\":123,\"unit\":\"%|count|tests|files|items\"}}\n ]\n}}\n\n\
                         Rules:\n\
                         - Avoid vague KRs like 'do better'\n\
                         - Prefer engineering outcomes (tests passing, endpoints implemented, docs updated, errors=0)\n\
                         - If unsure about a unit, use 'count'"
                    ),
                }],
            },
        ],
        tools: Vec::new(),
        // Low-ish temperature for structured, parseable output.
        temperature: Some(0.4),
        top_p: Some(0.9),
        max_tokens: Some(900),
        stop: Vec::new(),
    };

    let response = provider.complete(request).await.ok()?;
    // Concatenate all text-like parts (including thinking) before parsing.
    let text = response
        .message
        .content
        .iter()
        .filter_map(|part| match part {
            crate::provider::ContentPart::Text { text }
            | crate::provider::ContentPart::Thinking { text } => Some(text.as_str()),
            _ => None,
        })
        .collect::<Vec<_>>()
        .join("\n");

    tracing::debug!(
        model = %model_name_for_log,
        response_len = text.len(),
        response_preview = %text.chars().take(500).collect::<String>(),
        "OKR draft model response"
    );

    let parsed = extract_json_payload::<PlannedOkrDraft>(&text);
    if parsed.is_none() {
        tracing::warn!(
            model = %model_name_for_log,
            response_preview = %text.chars().take(500).collect::<String>(),
            "Failed to parse OKR draft JSON from model response"
        );
    }
    parsed
}
406
/// Asks the model to design a task-specific team of relay profiles.
///
/// The requested size is clamped to `2..=AUTOCHAT_MAX_AGENTS`. Returns `None`
/// when the provider is unavailable, the response fails to parse, or fewer
/// than two usable profiles survive validation — the caller then falls back
/// to generic profiles.
async fn plan_relay_profiles_with_registry(
    task: &str,
    model_ref: &str,
    requested_agents: usize,
    registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
) -> Option<Vec<RelayProfile>> {
    let (provider, model_name) = resolve_provider_for_model_autochat(registry, model_ref)?;
    let requested_agents = requested_agents.clamp(2, AUTOCHAT_MAX_AGENTS);

    let request = crate::provider::CompletionRequest {
        model: model_name,
        messages: vec![
            crate::provider::Message {
                role: crate::provider::Role::System,
                content: vec![crate::provider::ContentPart::Text {
                    text: "You are a relay-team architect. Return ONLY valid JSON.".to_string(),
                }],
            },
            crate::provider::Message {
                role: crate::provider::Role::User,
                content: vec![crate::provider::ContentPart::Text {
                    text: format!(
                        "Task:\n{task}\n\nDesign a task-specific relay team.\n\
                         Respond with JSON object only:\n\
                         {{\n \"profiles\": [\n {{\"name\":\"auto-...\",\"specialty\":\"...\",\"mission\":\"...\",\"capabilities\":[\"...\"]}}\n ]\n}}\n\
                         Requirements:\n\
                         - Return {} profiles\n\
                         - Names must be short kebab-case\n\
                         - Capabilities must be concise skill tags\n\
                         - Missions should be concrete and handoff-friendly",
                        requested_agents
                    ),
                }],
            },
        ],
        tools: Vec::new(),
        // High temperature: profile design benefits from diversity.
        temperature: Some(1.0),
        top_p: Some(0.9),
        max_tokens: Some(1200),
        stop: Vec::new(),
    };

    let response = provider.complete(request).await.ok()?;
    // Concatenate all text-like parts (including thinking) before parsing.
    let text = response
        .message
        .content
        .iter()
        .filter_map(|part| match part {
            crate::provider::ContentPart::Text { text }
            | crate::provider::ContentPart::Thinking { text } => Some(text.as_str()),
            _ => None,
        })
        .collect::<Vec<_>>()
        .join("\n");

    let planned = extract_json_payload::<PlannedRelayResponse>(&text)?;
    let mut existing = Vec::<String>::new();
    let mut runtime = Vec::<RelayProfile>::new();

    // Validate and deduplicate planner profiles, capping at the team limit.
    for profile in planned.profiles.into_iter().take(AUTOCHAT_MAX_AGENTS) {
        if let Some(runtime_profile) = build_runtime_profile_from_plan(profile, &existing) {
            existing.push(runtime_profile.name.clone());
            runtime.push(runtime_profile);
        }
    }

    // A relay needs at least two agents to be meaningful.
    if runtime.len() >= 2 {
        Some(runtime)
    } else {
        None
    }
}
479
/// Asks the model whether the relay needs one additional specialist this round.
///
/// Returns the new profile plus a human-readable reason, or `None` when the
/// model declines to spawn, the call fails, or the proposed profile cannot be
/// converted into a usable runtime profile.
async fn decide_dynamic_spawn_with_registry(
    task: &str,
    model_ref: &str,
    latest_output: &str,
    round: usize,
    ordered_agents: &[String],
    registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
) -> Option<(RelayProfile, String)> {
    let (provider, model_name) = resolve_provider_for_model_autochat(registry, model_ref)?;
    // Present the current roster as @-handles in the prompt.
    let team = ordered_agents
        .iter()
        .map(|name| format!("@{name}"))
        .collect::<Vec<_>>()
        .join(", ");
    // Keep the prompt bounded by excerpting the latest handoff.
    let output_excerpt = truncate_with_ellipsis(latest_output, 2200);

    let request = crate::provider::CompletionRequest {
        model: model_name,
        messages: vec![
            crate::provider::Message {
                role: crate::provider::Role::System,
                content: vec![crate::provider::ContentPart::Text {
                    text: "You are a relay scaling controller. Return ONLY valid JSON.".to_string(),
                }],
            },
            crate::provider::Message {
                role: crate::provider::Role::User,
                content: vec![crate::provider::ContentPart::Text {
                    text: format!(
                        "Task:\n{task}\n\nRound: {round}\nCurrent team: {team}\n\
                         Latest handoff excerpt:\n{output_excerpt}\n\n\
                         Decide whether the team needs one additional specialist right now.\n\
                         Respond with JSON object only:\n\
                         {{\n \"spawn\": true|false,\n \"reason\": \"...\",\n \"profile\": {{\"name\":\"auto-...\",\"specialty\":\"...\",\"mission\":\"...\",\"capabilities\":[\"...\"]}}\n}}\n\
                         If spawn=false, profile may be null or omitted."
                    ),
                }],
            },
        ],
        tools: Vec::new(),
        temperature: Some(1.0),
        top_p: Some(0.9),
        max_tokens: Some(420),
        stop: Vec::new(),
    };

    let response = provider.complete(request).await.ok()?;
    // Concatenate all text-like parts (including thinking) before parsing.
    let text = response
        .message
        .content
        .iter()
        .filter_map(|part| match part {
            crate::provider::ContentPart::Text { text }
            | crate::provider::ContentPart::Thinking { text } => Some(text.as_str()),
            _ => None,
        })
        .collect::<Vec<_>>()
        .join("\n");

    let decision = extract_json_payload::<RelaySpawnDecision>(&text)?;
    if !decision.spawn {
        return None;
    }

    // Existing agent names double as the uniqueness pool for the new profile.
    let profile = decision.profile?;
    let runtime_profile = build_runtime_profile_from_plan(profile, ordered_agents)?;
    // Supply a default reason when the model omitted one.
    let reason = if decision.reason.trim().is_empty() {
        "Model requested additional specialist for task scope.".to_string()
    } else {
        decision.reason.trim().to_string()
    };

    Some((runtime_profile, reason))
}
554
/// Entry point for the `run` CLI command.
///
/// Routes the trimmed message down one of three paths:
/// 1. `/autochat` (or `/go`/`/team` via normalization) with the OKR gate:
///    interactive OKR approval followed by Ralph PRD execution.
/// 2. `/autochat --no-prd`: a protocol-first relay without OKR tracking.
/// 3. Anything else: a regular single-session prompt.
///
/// # Errors
/// Fails on empty messages, invalid relay sizes, rejected `/go` tasks,
/// missing providers, `--max-steps 0`, and session/prompt I/O errors.
pub async fn execute(args: RunArgs) -> Result<()> {
    let message = args.message.trim();

    if message.is_empty() {
        anyhow::bail!("You must provide a message");
    }

    tracing::info!("Running with message: {}", message);

    let config = Config::load().await.unwrap_or_default();
    let workspace_dir = std::env::current_dir().unwrap_or_else(|_| std::path::PathBuf::from("."));
    // Best-effort: a failed snapshot refresh is logged but never blocks the run.
    let knowledge_snapshot =
        match crate::indexer::refresh_workspace_knowledge_snapshot(&workspace_dir).await {
            Ok(path) => {
                tracing::info!(
                    workspace = %workspace_dir.display(),
                    output = %path.display(),
                    "Refreshed workspace knowledge snapshot for run"
                );
                Some(path)
            }
            Err(e) => {
                tracing::warn!(
                    workspace = %workspace_dir.display(),
                    error = %e,
                    "Failed to refresh workspace knowledge snapshot"
                );
                None
            }
        };

    // `/go` and `/team` are sugar for `/autochat`; remember which one was used
    // because easy mode changes model selection and PRD requirements.
    let easy_go_requested = is_easy_go_command(message);
    let normalized = normalize_cli_go_command(message);
    if let Some(rest) = command_with_optional_args(&normalized, "/autochat") {
        let Some(parsed) = crate::autochat::parse_autochat_request(
            rest,
            AUTOCHAT_DEFAULT_AGENTS,
            AUTOCHAT_QUICK_DEMO_TASK,
        ) else {
            anyhow::bail!(
                "Usage: /autochat [count] [--no-prd] <task>\nEasy mode: /go <task>\ncount range: 2-{} (default: {})",
                AUTOCHAT_MAX_AGENTS,
                AUTOCHAT_DEFAULT_AGENTS
            );
        };
        let agent_count = parsed.agent_count;
        let parsed_task = parsed.task;
        // Easy mode rejects pasted run output; plain mode only normalizes whitespace.
        let task = if easy_go_requested {
            validate_easy_go_task(&parsed_task)?
        } else {
            normalize_go_task_input(&parsed_task)
        };
        // Easy mode always requires the OKR/PRD gate; /autochat can opt out via --no-prd.
        let require_prd = easy_go_requested || !parsed.bypass_prd;

        if !(2..=AUTOCHAT_MAX_AGENTS).contains(&agent_count) {
            anyhow::bail!(
                "Invalid relay size {}. count must be between 2 and {}",
                agent_count,
                AUTOCHAT_MAX_AGENTS
            );
        }

        let model = resolve_autochat_model(
            args.model.as_deref(),
            std::env::var("CODETETHER_DEFAULT_MODEL").ok().as_deref(),
            config.default_model.as_deref(),
            easy_go_requested,
        );

        if require_prd {
            // Easy mode without an explicit count may scale up to the max team size.
            let max_concurrent = if easy_go_requested && !parsed.explicit_count {
                AUTOCHAT_MAX_AGENTS
            } else {
                agent_count
            };
            let okr_id = Uuid::new_v4();
            let registry_for_draft = crate::provider::ProviderRegistry::from_vault()
                .await
                .ok()
                .map(std::sync::Arc::new);

            // Prefer a model-drafted OKR; fall back to the static template on
            // provider or parse failure (noting why in the printed draft).
            let (mut okr, draft_note) = if let Some(registry) = &registry_for_draft {
                match plan_okr_draft_with_registry(&task, &model, agent_count, registry).await {
                    Some(planned) => (okr_from_planned_draft(okr_id, &task, planned), None),
                    None => (
                        default_relay_okr_template(okr_id, &task),
                        Some("(OKR: fallback template — model draft parse failed)".to_string()),
                    ),
                }
            } else {
                (
                    default_relay_okr_template(okr_id, &task),
                    Some("(OKR: fallback template — provider unavailable)".to_string()),
                )
            };

            let mut run = OkrRun::new(
                okr_id,
                format!("Run {}", chrono::Local::now().format("%Y-%m-%d %H:%M")),
            );
            let _ = run.submit_for_approval();

            let command_label = if easy_go_requested {
                "/go"
            } else {
                "/autochat"
            };
            // Print the draft so the user can review before approving.
            println!("\n⚠️ {command_label} OKR Draft\n");
            println!("Task: {}", truncate_with_ellipsis(&task, 80));
            println!("Agents: {} | Model: {}", agent_count, model);
            if let Some(note) = draft_note {
                println!("{}", note);
            }
            println!("\nObjective: {}", okr.title);
            println!("\nKey Results:");
            for kr in &okr.key_results {
                println!(" • {} (target: {} {})", kr.title, kr.target_value, kr.unit);
            }
            println!("\n");

            // Interactive approval gate: anything but y/yes denies the run.
            print!("Approve OKR and start relay? [y/n]: ");
            std::io::stdout().flush()?;
            let mut input = String::new();
            std::io::stdin().read_line(&mut input)?;

            let input = input.trim().to_lowercase();
            if input != "y" && input != "yes" {
                run.record_decision(ApprovalDecision::deny(run.id, "User denied via CLI"));
                println!("❌ OKR denied. Relay not started.");
                println!("Use /autochat --no-prd for tactical execution without OKR/PRD tracking.");
                return Ok(());
            }

            println!("✅ OKR approved! Starting Ralph PRD execution...\n");

            // Persist the OKR and the approved run; persistence failures are
            // tolerated (ignored) so the relay can still proceed.
            let mut approved_run = run;
            if let Ok(repo) = OkrRepository::from_config().await {
                let _ = repo.create_okr(okr.clone()).await;
                approved_run.record_decision(ApprovalDecision::approve(
                    approved_run.id,
                    "User approved via CLI",
                ));
                approved_run.correlation_id = Some(format!("ralph-{}", Uuid::new_v4()));
                let _ = repo.create_run(approved_run.clone()).await;
                tracing::info!(okr_id = %okr_id, okr_run_id = %approved_run.id, "OKR run approved and saved");
            }

            // Reuse the registry created for drafting when available.
            let registry = if let Some(registry) = registry_for_draft {
                registry
            } else {
                std::sync::Arc::new(crate::provider::ProviderRegistry::from_vault().await?)
            };
            let (provider, resolved_model) = resolve_provider_for_model_autochat(&registry, &model)
                .ok_or_else(|| anyhow::anyhow!("No provider available for model '{model}'"))?;

            let bus = crate::bus::AgentBus::new().into_arc();
            crate::bus::s3_sink::spawn_bus_s3_sink(bus.clone());

            let ralph_result = super::go_ralph::execute_go_ralph(
                &task,
                &mut okr,
                &mut approved_run,
                provider,
                &resolved_model,
                10,
                Some(bus),
                max_concurrent,
                Some(registry.clone()),
            )
            .await?;

            // Persist the final run state (best-effort).
            if let Ok(repo) = OkrRepository::from_config().await {
                let _ = repo.update_run(approved_run).await;
            }

            match args.format.as_str() {
                "json" => println!(
                    "{}",
                    serde_json::to_string_pretty(&serde_json::json!({
                        "passed": ralph_result.passed,
                        "total": ralph_result.total,
                        "all_passed": ralph_result.all_passed,
                        "iterations": ralph_result.iterations,
                        "feature_branch": ralph_result.feature_branch,
                        "prd_path": ralph_result.prd_path.display().to_string(),
                        "status": format!("{:?}", ralph_result.status),
                        "stories": ralph_result.stories.iter().map(|s| serde_json::json!({
                            "id": s.id,
                            "title": s.title,
                            "passed": s.passed,
                        })).collect::<Vec<_>>(),
                    }))?
                ),
                _ => {
                    println!(
                        "{}",
                        super::go_ralph::format_go_ralph_result(&ralph_result, &task)
                    );
                }
            }
            return Ok(());
        }

        // --no-prd path: run the protocol-first relay without OKR tracking.
        let relay_result = run_protocol_first_relay(agent_count, &task, &model, None, None).await?;
        match args.format.as_str() {
            "json" => println!("{}", serde_json::to_string_pretty(&relay_result)?),
            _ => {
                println!("{}", relay_result.summary);
                if let Some(failure) = &relay_result.failure {
                    eprintln!("\nFailure detail: {}", failure);
                }
                eprintln!(
                    "\n[Relay: {} | Model: {}]",
                    relay_result.relay_id, relay_result.model
                );
            }
        }
        return Ok(());
    }

    // Plain prompt path: pick or create a session, then forward the message.
    let mut session = if let Some(codex_id) = args.codex_session.clone() {
        tracing::info!("Importing Codex session: {}", codex_id);
        import_codex_session_by_id(&codex_id).await?
    } else if let Some(session_id) = args.session.clone() {
        tracing::info!("Continuing session: {}", session_id);
        Session::load(&session_id).await?
    } else if args.continue_session {
        // -c/--continue: resume the most recent session for this directory,
        // creating a fresh one when none exists.
        match Session::last_for_directory(Some(&workspace_dir)).await {
            Ok(s) => {
                tracing::info!(
                    session_id = %s.id,
                    workspace = %workspace_dir.display(),
                    "Continuing last workspace session"
                );
                s
            }
            Err(_) => {
                let s = Session::new().await?;
                tracing::info!(
                    session_id = %s.id,
                    workspace = %workspace_dir.display(),
                    "No workspace session found; created new session"
                );
                s
            }
        }
    } else {
        let s = Session::new().await?;
        tracing::info!("Created new session: {}", s.id);
        s
    };

    // Model precedence for plain sessions: CLI flag > env var > config default.
    let model = args
        .model
        .or_else(|| std::env::var("CODETETHER_DEFAULT_MODEL").ok())
        .or(config.default_model);

    if let Some(model) = model {
        tracing::info!("Using model: {}", model);
        session.metadata.model = Some(model);
    }

    session.metadata.knowledge_snapshot = knowledge_snapshot;
    if let Some(0) = args.max_steps {
        anyhow::bail!("--max-steps must be at least 1");
    }
    session.max_steps = args.max_steps;

    // Wire the agent bus into the session and mirror events to S3.
    let bus = AgentBus::new().into_arc();
    crate::bus::set_global(bus.clone());
    crate::bus::s3_sink::spawn_bus_s3_sink(bus.clone());
    session.bus = Some(bus);

    let result = session.prompt(message).await?;

    match args.format.as_str() {
        "json" => {
            println!("{}", serde_json::to_string_pretty(&result)?);
        }
        _ => {
            println!("{}", result.text);
            eprintln!(
                "\n[Session: {} | Continue with: codetether run -c \"...\"]",
                session.id
            );
        }
    }

    Ok(())
}
869
/// Returns the trimmed argument tail when `input` starts with exactly
/// `command` (followed by whitespace or end-of-input); `None` otherwise.
/// A longer token sharing the prefix (e.g. `/autochatx`) does not match.
fn command_with_optional_args<'a>(input: &'a str, command: &str) -> Option<&'a str> {
    let rest = input.trim().strip_prefix(command)?;

    match rest.chars().next() {
        // Bare command with no arguments.
        None => Some(""),
        // Command followed by whitespace: hand back the trimmed tail.
        Some(c) if c.is_whitespace() => Some(rest.trim()),
        // Prefix of a longer token — not this command.
        Some(_) => None,
    }
}
885
886fn normalize_cli_go_command(input: &str) -> String {
887 let trimmed = input.trim();
888 if trimmed.is_empty() || !trimmed.starts_with('/') {
889 return trimmed.to_string();
890 }
891
892 let mut parts = trimmed.splitn(2, char::is_whitespace);
893 let command = parts.next().unwrap_or("");
894 let args = parts.next().unwrap_or("").trim();
895
896 match command.to_ascii_lowercase().as_str() {
897 "/go" | "/team" => {
898 if args.is_empty() {
899 format!(
900 "/autochat {} {}",
901 AUTOCHAT_DEFAULT_AGENTS, AUTOCHAT_QUICK_DEMO_TASK
902 )
903 } else {
904 let mut count_and_task = args.splitn(2, char::is_whitespace);
905 let first = count_and_task.next().unwrap_or("");
906 if let Ok(count) = first.parse::<usize>() {
907 let task = count_and_task.next().unwrap_or("").trim();
908 if task.is_empty() {
909 format!("/autochat {count} {AUTOCHAT_QUICK_DEMO_TASK}")
910 } else {
911 format!("/autochat {count} {task}")
912 }
913 } else {
914 format!("/autochat {} {args}", AUTOCHAT_DEFAULT_AGENTS)
915 }
916 }
917 }
918 _ => trimmed.to_string(),
919 }
920}
921
/// True when the first whitespace-delimited token is `/go` or `/team`
/// (case-insensitive) — the "easy mode" relay commands.
fn is_easy_go_command(input: &str) -> bool {
    input.split_whitespace().next().is_some_and(|token| {
        let token = token.to_ascii_lowercase();
        token == "/go" || token == "/team"
    })
}
931
/// Collapses all runs of whitespace (including newlines) into single spaces
/// and trims the ends, yielding a one-line task string.
fn normalize_go_task_input(task: &str) -> String {
    let mut normalized = String::with_capacity(task.len());
    for word in task.split_whitespace() {
        if !normalized.is_empty() {
            normalized.push(' ');
        }
        normalized.push_str(word);
    }
    normalized
}
935
/// Heuristic guard: detects when a `/go` "task" is actually pasted output
/// from a previous run (progress reports, story lists, next-steps sections)
/// rather than a fresh objective.
fn looks_like_pasted_go_run_output(task: &str) -> bool {
    const MARKERS: [&str; 7] = [
        "progress:",
        "iterations:",
        "feature branch:",
        "stories:",
        "incomplete stories:",
        "next steps:",
        "assessment is done and documented",
    ];

    let lower = task.to_ascii_lowercase();
    let hits = MARKERS.iter().filter(|marker| lower.contains(**marker)).count();

    // Two markers is decisive; a lone "next steps:" in a long blob also counts.
    hits >= 2 || (task.len() > 400 && lower.contains("next steps:"))
}
951
952fn validate_easy_go_task(task: &str) -> Result<String> {
953 let normalized = normalize_go_task_input(task);
954 if normalized.is_empty() {
955 anyhow::bail!(
956 "`/go` requires a task. Example: /go implement /v1/agent compatibility routes"
957 );
958 }
959
960 if looks_like_pasted_go_run_output(&normalized) {
961 anyhow::bail!(
962 "`/go` received text that looks like prior run output/logs. \
963Use a concise objective sentence instead, e.g. `/go implement /v1/agent/* compatibility routes`."
964 );
965 }
966
967 Ok(normalized)
968}
969
970fn resolve_autochat_model(
971 cli_model: Option<&str>,
972 env_model: Option<&str>,
973 config_model: Option<&str>,
974 easy_go_requested: bool,
975) -> String {
976 if let Some(model) = cli_model.filter(|value| !value.trim().is_empty()) {
977 return model.to_string();
978 }
979 if easy_go_requested {
980 return GO_DEFAULT_MODEL.to_string();
981 }
982 if let Some(model) = env_model.filter(|value| !value.trim().is_empty()) {
983 return model.to_string();
984 }
985 if let Some(model) = config_model.filter(|value| !value.trim().is_empty()) {
986 return model.to_string();
987 }
988 "zai/glm-5".to_string()
989}
990
991fn build_relay_profiles(count: usize) -> Vec<RelayProfile> {
992 let mut profiles = Vec::with_capacity(count);
993 for idx in 0..count {
994 let name = format!("auto-agent-{}", idx + 1);
995
996 let instructions = format!(
997 "You are @{name}.\n\
998 Role policy: self-organize from task context and current handoff instead of assuming a fixed persona.\n\
999 Mission: advance the relay with concrete, high-signal next actions and clear ownership boundaries.\n\n\
1000 This is a protocol-first relay conversation. Treat the incoming handoff as authoritative context.\n\
1001 Keep your response concise, concrete, and useful for the next specialist.\n\
1002 Include one clear recommendation for what the next agent should do.\n\
1003 If the task scope is too large, explicitly call out missing specialties and handoff boundaries.",
1004 );
1005 let mut capabilities = vec!["generalist".to_string(), "self-organizing".to_string()];
1006 crate::autochat::ensure_required_relay_capabilities(&mut capabilities);
1007
1008 profiles.push(RelayProfile {
1009 name,
1010 instructions,
1011 capabilities,
1012 });
1013 }
1014 profiles
1015}
1016
/// Returns `value` unchanged when it has at most `max_chars` characters;
/// otherwise keeps the first `max_chars` characters and appends `"..."`.
/// `max_chars == 0` yields an empty string. Operates on chars, so multi-byte
/// text is never split mid-character.
fn truncate_with_ellipsis(value: &str, max_chars: usize) -> String {
    if max_chars == 0 {
        return String::new();
    }

    // Byte offset of the (max_chars + 1)-th character, if any.
    match value.char_indices().nth(max_chars) {
        None => value.to_string(),
        Some((cut, _)) => format!("{}...", &value[..cut]),
    }
}
1038
/// Normalizes relay text for convergence comparison, delegating to the shared
/// helper with a 280-character cap.
fn normalize_for_convergence(text: &str) -> String {
    crate::autochat::normalize_for_convergence(text, 280)
}
1042
1043fn extract_semantic_handoff_from_rlm(answer: &str) -> String {
1044 match FinalPayload::parse(answer) {
1045 FinalPayload::Semantic(payload) => payload.answer,
1046 _ => answer.trim().to_string(),
1047 }
1048}
1049
/// Builds the prompt for the next relay agent from the previous agent's output.
///
/// When a registry/provider is available, the raw output is distilled through
/// a short RLM pass; otherwise (or when the pass fails or yields nothing) a
/// truncated copy of the output is used. Returns the composed prompt and a
/// flag indicating whether the RLM path produced the payload.
async fn prepare_autochat_handoff_with_registry(
    task: &str,
    from_agent: &str,
    output: &str,
    model_ref: &str,
    registry: Option<&std::sync::Arc<crate::provider::ProviderRegistry>>,
) -> (String, bool) {
    let mut used_rlm = false;
    // Fallback payload: truncate oversized handoffs so the prompt stays bounded.
    let mut relay_payload = if output.len() > AUTOCHAT_RLM_THRESHOLD_CHARS {
        truncate_with_ellipsis(output, AUTOCHAT_RLM_FALLBACK_CHARS)
    } else {
        output.to_string()
    };

    if let Some(registry) = registry
        && let Some((provider, model_name)) =
            resolve_provider_for_model_autochat(registry, model_ref)
    {
        // Short RLM pass (capped at 2 iterations) to distill the handoff.
        let mut executor =
            RlmExecutor::new(output.to_string(), provider, model_name).with_max_iterations(2);
        match executor.analyze(AUTOCHAT_RLM_HANDOFF_QUERY).await {
            Ok(result) => {
                let normalized = extract_semantic_handoff_from_rlm(&result.answer);
                // Only adopt the RLM result when it produced non-empty text.
                if !normalized.is_empty() {
                    relay_payload = normalized;
                    used_rlm = true;
                }
            }
            Err(err) => {
                tracing::warn!(
                    error = %err,
                    "CLI RLM handoff normalization failed; using fallback payload"
                );
            }
        }
    }

    (
        format!(
            "Relay task:\n{task}\n\nIncoming handoff from @{from_agent}:\n{relay_payload}\n\n\
             Continue the work from this handoff. Keep your response focused and provide one concrete next-step instruction for the next agent."
        ),
        used_rlm,
    )
}
1095
/// Run the protocol-first autochat relay: a round-robin baton pass between
/// `agent_count` planned agents over the shared `AgentBus`, until the output
/// converges, an error occurs, or `AUTOCHAT_MAX_ROUNDS` rounds complete.
///
/// When `okr_id`/`okr_run_id` are provided, KR progress is estimated from
/// turn count and persisted to the OKR run during and after the relay.
///
/// Returns an [`AutochatCliResult`] describing the relay outcome; errors
/// only on session construction failure (`Session::new` via `?`).
async fn run_protocol_first_relay(
    agent_count: usize,
    task: &str,
    model_ref: &str,
    okr_id: Option<Uuid>,
    okr_run_id: Option<Uuid>,
) -> Result<AutochatCliResult> {
    let bus = AgentBus::new().into_arc();

    // Mirror bus traffic to the S3 sink in the background.
    crate::bus::s3_sink::spawn_bus_s3_sink(bus.clone());

    let relay = ProtocolRelayRuntime::new(bus.clone());

    // Registry is best-effort: failure to load from vault degrades to
    // fallback profiles / fallback model rotation rather than erroring out.
    let registry = crate::provider::ProviderRegistry::from_vault()
        .await
        .ok()
        .map(std::sync::Arc::new);

    // Prefer model-planned profiles when a registry is available; otherwise
    // (or when planning fails) fall back to self-organizing profiles.
    let mut planner_used = false;
    let profiles = if let Some(registry) = &registry {
        if let Some(planned) =
            plan_relay_profiles_with_registry(task, model_ref, agent_count, registry).await
        {
            planner_used = true;
            planned
        } else {
            build_relay_profiles(agent_count)
        }
    } else {
        build_relay_profiles(agent_count)
    };

    let relay_profiles: Vec<RelayAgentProfile> = profiles
        .iter()
        .map(|profile| RelayAgentProfile {
            name: profile.name.clone(),
            capabilities: profile.capabilities.clone(),
        })
        .collect();

    // `ordered_agents` defines the baton-pass order; dynamic spawns insert
    // into it mid-relay.
    let mut ordered_agents: Vec<String> = profiles
        .iter()
        .map(|profile| profile.name.clone())
        .collect();
    let mut sessions: HashMap<String, Session> = HashMap::new();
    let mut relay_receivers: HashMap<String, crate::bus::BusHandle> = HashMap::new();
    let mut agent_models: HashMap<String, String> = HashMap::new();
    let mut model_rotation = if let Some(registry) = &registry {
        build_round_robin_model_rotation(registry, model_ref).await
    } else {
        RelayModelRotation::fallback(model_ref)
    };

    // One session + bus receiver per planned agent, each seeded with its
    // profile instructions as a system message and a rotated model.
    for profile in &profiles {
        let assigned_model_ref = model_rotation.next_model_ref(model_ref);
        let mut session = Session::new().await?;
        session.metadata.model = Some(assigned_model_ref.clone());
        session.set_agent_name(profile.name.clone());
        session.bus = Some(bus.clone());
        session.add_message(Message {
            role: Role::System,
            content: vec![ContentPart::Text {
                text: profile.instructions.clone(),
            }],
        });
        agent_models.insert(profile.name.clone(), assigned_model_ref);
        sessions.insert(profile.name.clone(), session);
        attach_handoff_receiver(&mut relay_receivers, bus.clone(), &profile.name);
    }

    // A relay is only meaningful with someone to hand off to.
    if ordered_agents.len() < 2 {
        anyhow::bail!("Autochat needs at least 2 agents to relay.");
    }

    relay.register_agents(&relay_profiles);
    let mut context_receiver = bus.handle(format!("relay-context-{}", relay.relay_id()));
    let mut shared_context = SharedRelayContext::default();

    // Load KR target values for the linked OKR (best-effort; any failure
    // yields an empty map, which disables progress tracking below).
    let kr_targets: std::collections::HashMap<String, f64> =
        if let (Some(okr_id_val), Some(_run_id)) = (okr_id, okr_run_id) {
            if let Ok(repo) = crate::okr::persistence::OkrRepository::from_config().await {
                if let Ok(Some(okr)) = repo.get_okr(okr_id_val).await {
                    okr.key_results
                        .iter()
                        .map(|kr| (kr.id.to_string(), kr.target_value))
                        .collect()
                } else {
                    std::collections::HashMap::new()
                }
            } else {
                std::collections::HashMap::new()
            }
        } else {
            std::collections::HashMap::new()
        };

    // Monotonically increasing per-KR progress estimates, keyed by KR id.
    let mut kr_progress: std::collections::HashMap<String, f64> = std::collections::HashMap::new();

    // The "baton": the prompt each agent receives, rewritten every turn.
    let mut baton = format!(
        "Task:\n{task}\n\nStart by proposing an execution strategy and one immediate next step."
    );
    let mut previous_normalized: Option<String> = None;
    let mut convergence_hits = 0usize;
    let mut turns = 0usize;
    let mut dynamic_spawn_count = 0usize;
    let mut rlm_handoff_count = 0usize;
    let mut rlm_context_count = 0usize;
    // Default status if the loop runs to completion without converging.
    let mut status = crate::autochat::AUTOCHAT_STATUS_MAX_ROUNDS_REACHED.to_string();
    let mut failure_note: Option<String> = None;

    'relay_loop: for round in 1..=AUTOCHAT_MAX_ROUNDS {
        // Index loop (not iterator) because dynamic spawns grow
        // `ordered_agents` while iterating.
        let mut idx = 0usize;
        while idx < ordered_agents.len() {
            let to = ordered_agents[idx].clone();
            // First turn of round 1 comes "from" the user; otherwise the
            // previous agent in order (wrapping from last to first).
            let from = if idx == 0 {
                if round == 1 {
                    "user".to_string()
                } else {
                    ordered_agents[ordered_agents.len() - 1].clone()
                }
            } else {
                ordered_agents[idx - 1].clone()
            };

            turns += 1;
            // Pull any context deltas published since the last turn.
            let _ =
                drain_context_updates(&mut context_receiver, relay.relay_id(), &mut shared_context);
            let correlation_id = relay.send_handoff(&from, &to, &baton);
            // Receive the handoff back through the recipient's bus receiver,
            // matched by correlation id; a bus failure aborts the relay.
            let consumed_handoff = match consume_handoff_by_correlation(
                &mut relay_receivers,
                &to,
                &correlation_id,
            )
            .await
            {
                Ok(handoff) => handoff,
                Err(err) => {
                    status = "bus_error".to_string();
                    failure_note = Some(format!(
                        "Failed to consume handoff for @{to} (correlation={correlation_id}): {err}"
                    ));
                    break 'relay_loop;
                }
            };
            let prompt_input = compose_prompt_with_context(&consumed_handoff, &shared_context);

            // Take ownership of the session for the duration of the prompt;
            // it is re-inserted on both the success and error paths below.
            let Some(mut session) = sessions.remove(&to) else {
                status = "agent_error".to_string();
                failure_note = Some(format!("Relay agent @{to} session was unavailable."));
                break 'relay_loop;
            };

            let output = match session.prompt(&prompt_input).await {
                Ok(response) => response.text,
                Err(err) => {
                    status = "agent_error".to_string();
                    failure_note = Some(format!("Relay agent @{to} failed: {err}"));
                    // Put the session back so shutdown/cleanup still sees it.
                    sessions.insert(to, session);
                    break 'relay_loop;
                }
            };

            sessions.insert(to.clone(), session);

            // Convergence: count consecutive turns whose normalized output
            // is identical; any difference resets the streak.
            let normalized = normalize_for_convergence(&output);
            if previous_normalized.as_deref() == Some(normalized.as_str()) {
                convergence_hits += 1;
            } else {
                convergence_hits = 0;
            }
            previous_normalized = Some(normalized);

            // Use the model actually assigned to this agent for RLM work,
            // falling back to the relay-wide model ref.
            let turn_model_ref = agent_models
                .get(&to)
                .map(String::as_str)
                .unwrap_or(model_ref);
            let (next_handoff, used_rlm) = prepare_autochat_handoff_with_registry(
                task,
                &to,
                &output,
                turn_model_ref,
                registry.as_ref(),
            )
            .await;
            if used_rlm {
                rlm_handoff_count += 1;
            }
            // Distill a shared-context delta from this turn's output and
            // publish it so other participants can drain it.
            let turn_context_provider = registry
                .as_ref()
                .and_then(|r| resolve_provider_for_model_autochat(r, turn_model_ref));
            let (context_delta, used_context_rlm) =
                distill_context_delta_with_rlm(&output, task, &to, turn_context_provider).await;
            if used_context_rlm {
                rlm_context_count += 1;
            }
            shared_context.merge_delta(&context_delta);
            let publisher = bus.handle(to.clone());
            publish_context_delta(
                &publisher,
                relay.relay_id(),
                &to,
                round,
                turns,
                &context_delta,
            );
            baton = next_handoff;

            // KR progress heuristic: scale each KR target by the fraction of
            // the maximum possible turns consumed so far, never decreasing.
            if !kr_targets.is_empty() {
                let max_turns = ordered_agents.len() * AUTOCHAT_MAX_ROUNDS;
                let progress_ratio = (turns as f64 / max_turns as f64).min(1.0);

                for (kr_id, target) in &kr_targets {
                    let current = progress_ratio * target;
                    let existing = kr_progress.get(kr_id).copied().unwrap_or(0.0);
                    if current > existing {
                        kr_progress.insert(kr_id.clone(), current);
                    }
                }

                // Persist interim progress to the run, but only while the run
                // is still resumable (best-effort; write errors ignored).
                if let Some(run_id) = okr_run_id
                    && let Ok(repo) = crate::okr::persistence::OkrRepository::from_config().await
                    && let Ok(Some(mut run)) = repo.get_run(run_id).await
                    && run.is_resumable()
                {
                    run.iterations = turns as u32;
                    for (kr_id, value) in &kr_progress {
                        run.update_kr_progress(kr_id, *value);
                    }
                    run.status = crate::okr::OkrRunStatus::Running;
                    let _ = repo.update_run(run).await;
                }
            }

            // Dynamic spawn gate: bounded spawn count, bounded team size, and
            // enough output to be worth analyzing.
            let can_attempt_spawn = dynamic_spawn_count < AUTOCHAT_MAX_DYNAMIC_SPAWNS
                && ordered_agents.len() < AUTOCHAT_MAX_AGENTS
                && output.len() >= AUTOCHAT_SPAWN_CHECK_MIN_CHARS;

            if can_attempt_spawn
                && let Some(registry) = &registry
                && let Some((profile, reason)) = decide_dynamic_spawn_with_registry(
                    task,
                    model_ref,
                    &output,
                    round,
                    &ordered_agents,
                    registry,
                )
                .await
            {
                match Session::new().await {
                    Ok(mut spawned_session) => {
                        // Set up the spawned agent exactly like planned ones.
                        let spawned_model_ref = model_rotation.next_model_ref(model_ref);
                        spawned_session.metadata.model = Some(spawned_model_ref.clone());
                        spawned_session.set_agent_name(profile.name.clone());
                        spawned_session.bus = Some(bus.clone());
                        spawned_session.add_message(Message {
                            role: Role::System,
                            content: vec![ContentPart::Text {
                                text: profile.instructions.clone(),
                            }],
                        });

                        relay.register_agents(&[RelayAgentProfile {
                            name: profile.name.clone(),
                            capabilities: profile.capabilities.clone(),
                        }]);
                        attach_handoff_receiver(&mut relay_receivers, bus.clone(), &profile.name);

                        // Insert directly after the current agent so the new
                        // agent takes the very next turn.
                        ordered_agents.insert(idx + 1, profile.name.clone());
                        agent_models.insert(profile.name.clone(), spawned_model_ref);
                        sessions.insert(profile.name.clone(), spawned_session);
                        dynamic_spawn_count += 1;

                        tracing::info!(
                            agent = %profile.name,
                            reason = %reason,
                            "Dynamic relay spawn accepted"
                        );
                    }
                    Err(err) => {
                        // Spawn failure is non-fatal; the relay continues.
                        tracing::warn!(
                            agent = %profile.name,
                            error = %err,
                            "Dynamic relay spawn requested but failed"
                        );
                    }
                }
            }

            // Two consecutive identical (normalized) outputs => converged.
            if convergence_hits >= 2 {
                status = "converged".to_string();
                break 'relay_loop;
            }

            idx += 1;
        }
    }

    relay.shutdown_agents(&ordered_agents);

    // Final OKR persistence: record outcomes and the terminal run status
    // (best-effort; any repo failure silently skips this section).
    if let Some(run_id) = okr_run_id
        && let Ok(repo) = crate::okr::persistence::OkrRepository::from_config().await
        && let Ok(Some(mut run)) = repo.get_run(run_id).await
    {
        for (kr_id, value) in &kr_progress {
            run.update_kr_progress(kr_id, *value);
        }

        // Evidence strings attached to every outcome recorded below.
        let base_evidence = vec![
            format!("relay:{}", relay.relay_id()),
            format!("turns:{}", turns),
            format!("agents:{}", ordered_agents.len()),
            format!("status:{}", status),
            format!("rlm_handoffs:{}", rlm_handoff_count),
            format!("dynamic_spawns:{}", dynamic_spawn_count),
        ];

        let outcome_type = if status == "converged" {
            crate::okr::KrOutcomeType::FeatureDelivered
        } else {
            crate::okr::KrOutcomeType::Evidence
        };

        for (kr_id_str, value) in &kr_progress {
            // KR ids that are not valid UUIDs are skipped (with a warning
            // emitted inside parse_uuid_guarded).
            if let Some(kr_uuid) = parse_uuid_guarded(kr_id_str, "cli_relay_outcome_kr_link") {
                let kr_description = format!(
                    "CLI relay outcome for KR {}: {} agents, {} turns, status={}",
                    kr_id_str,
                    ordered_agents.len(),
                    turns,
                    status
                );
                run.outcomes.push({
                    let mut outcome =
                        crate::okr::KrOutcome::new(kr_uuid, kr_description).with_value(*value);
                    outcome.run_id = Some(run.id);
                    outcome.outcome_type = outcome_type;
                    outcome.evidence = base_evidence.clone();
                    outcome.source = "cli relay".to_string();
                    outcome
                });
            }
        }

        // Terminal run status: converged => complete(); hard errors =>
        // Failed; max-rounds-reached still counts as Completed.
        if status == "converged" {
            run.complete();
        } else if status == "agent_error" || status == "bus_error" {
            run.status = crate::okr::OkrRunStatus::Failed;
        } else {
            run.status = crate::okr::OkrRunStatus::Completed;
        }
        let _ = repo.update_run(run).await;
    }

    // Human-readable summary assembled from the relay's final state.
    let mut summary = format!(
        "Autochat complete ({status}) — relay {} with {} agents over {} turns.\n\nFinal relay handoff:\n{}",
        relay.relay_id(),
        ordered_agents.len(),
        turns,
        truncate_with_ellipsis(&baton, 4_000)
    );
    if let Some(note) = &failure_note {
        summary.push_str(&format!("\n\nFailure detail: {note}"));
    }
    if planner_used {
        summary.push_str("\n\nTeam planning: model-organized profiles.");
    } else {
        summary.push_str("\n\nTeam planning: fallback self-organizing profiles.");
    }
    if rlm_handoff_count > 0 {
        summary.push_str(&format!("\nRLM-normalized handoffs: {rlm_handoff_count}"));
    }
    if rlm_context_count > 0 {
        summary.push_str(&format!("\nRLM context deltas: {rlm_context_count}"));
    }
    if shared_context.item_count() > 0 {
        summary.push_str(&format!(
            "\nShared context items: {}",
            shared_context.item_count()
        ));
    }
    if dynamic_spawn_count > 0 {
        summary.push_str(&format!("\nDynamic relay spawns: {dynamic_spawn_count}"));
    }

    Ok(AutochatCliResult {
        status,
        relay_id: relay.relay_id().to_string(),
        model: model_ref.to_string(),
        agent_count: ordered_agents.len(),
        turns,
        agents: ordered_agents,
        final_handoff: baton,
        summary,
        failure: failure_note,
        shared_context_items: shared_context.item_count(),
        rlm_context_count,
    })
}
1505
#[cfg(test)]
mod tests {
    use super::PlannedRelayProfile;
    use super::{
        AUTOCHAT_QUICK_DEMO_TASK, PlannedRelayResponse, build_runtime_profile_from_plan,
        command_with_optional_args, extract_json_payload, is_easy_go_command,
        normalize_cli_go_command, normalize_go_task_input, resolve_autochat_model,
        validate_easy_go_task,
    };

    // `/go <count> <task>` is rewritten to the canonical `/autochat` form.
    #[test]
    fn normalize_go_maps_to_autochat_with_count_and_task() {
        assert_eq!(
            normalize_cli_go_command("/go 4 build protocol relay"),
            "/autochat 4 build protocol relay"
        );
    }

    // `/go <count>` with no task falls back to the built-in demo task.
    #[test]
    fn normalize_go_count_only_uses_demo_task() {
        assert_eq!(
            normalize_cli_go_command("/go 4"),
            format!("/autochat 4 {AUTOCHAT_QUICK_DEMO_TASK}")
        );
    }

    // With no leading count, the provided default agent count (3) is used.
    #[test]
    fn parse_autochat_args_supports_default_count() {
        let parsed =
            crate::autochat::parse_autochat_request("build a relay", 3, AUTOCHAT_QUICK_DEMO_TASK)
                .expect("valid args");
        assert_eq!(
            (parsed.agent_count, parsed.task.as_str()),
            (3, "build a relay"),
        );
    }

    // A leading integer overrides the default agent count.
    #[test]
    fn parse_autochat_args_supports_explicit_count() {
        let parsed =
            crate::autochat::parse_autochat_request("4 build a relay", 3, AUTOCHAT_QUICK_DEMO_TASK)
                .expect("valid args");
        assert_eq!(
            (parsed.agent_count, parsed.task.as_str()),
            (4, "build a relay"),
        );
    }

    // Newlines, tabs, and repeated spaces in the task collapse to single spaces.
    #[test]
    fn normalize_go_task_collapses_whitespace() {
        assert_eq!(
            normalize_go_task_input(" implement api\ncompat routes\twith tests "),
            "implement api compat routes with tests"
        );
    }

    // Pasting a previous run's status output as the task is rejected.
    #[test]
    fn validate_go_task_rejects_pasted_run_output() {
        let pasted =
            "Task: foo Progress: 0/7 stories Iterations: 7/10 Incomplete stories: ... Next steps:";
        assert!(validate_easy_go_task(pasted).is_err());
    }

    // "/autochatty" must not match the "/autochat" command prefix.
    #[test]
    fn command_with_optional_args_avoids_prefix_collision() {
        assert_eq!(command_with_optional_args("/autochatty", "/autochat"), None);
    }

    // `/go` and `/team` are easy-go aliases; `/autochat` itself is not.
    #[test]
    fn easy_go_detection_handles_aliases() {
        assert!(is_easy_go_command("/go 4 task"));
        assert!(is_easy_go_command("/team 4 task"));
        assert!(!is_easy_go_command("/autochat 4 task"));
    }

    // In easy-go mode with no explicit/session model, the MiniMax default
    // wins even over a configured fallback model.
    #[test]
    fn easy_go_defaults_to_minimax_when_model_not_set() {
        assert_eq!(
            resolve_autochat_model(None, None, Some("zai/glm-5"), true),
            "minimax-credits/MiniMax-M2.5-highspeed"
        );
    }

    // An explicitly supplied model always beats the easy-go default.
    #[test]
    fn explicit_model_wins_over_easy_go_default() {
        assert_eq!(
            resolve_autochat_model(Some("zai/glm-5"), None, None, true),
            "zai/glm-5"
        );
    }

    // JSON plans wrapped in markdown code fences are still parseable.
    #[test]
    fn extract_json_payload_parses_markdown_wrapped_json() {
        let wrapped = "Here is the plan:\n```json\n{\"profiles\":[{\"name\":\"auto-db\",\"specialty\":\"database\",\"mission\":\"Own schema and queries\",\"capabilities\":[\"sql\",\"indexing\"]}]}\n```";
        let parsed: PlannedRelayResponse =
            extract_json_payload(wrapped).expect("should parse wrapped JSON");
        assert_eq!(parsed.profiles.len(), 1);
        assert_eq!(parsed.profiles[0].name, "auto-db");
    }

    // Planned names are slugified with an "auto-" prefix, deduplicated with a
    // numeric suffix, and always gain the relay/autochat capabilities.
    #[test]
    fn build_runtime_profile_normalizes_and_deduplicates_name() {
        let planned = PlannedRelayProfile {
            name: "Data Specialist".to_string(),
            specialty: "data engineering".to_string(),
            mission: "Prepare datasets for downstream coding".to_string(),
            capabilities: vec!["ETL".to_string(), "sql".to_string()],
        };

        let profile =
            build_runtime_profile_from_plan(planned, &["auto-data-specialist".to_string()])
                .expect("profile should be built");

        assert_eq!(profile.name, "auto-data-specialist-2");
        assert!(profile.capabilities.iter().any(|cap| cap == "relay"));
        assert!(profile.capabilities.iter().any(|cap| cap == "autochat"));
    }
}
1623}