1use super::RunArgs;
4use crate::bus::{AgentBus, relay::ProtocolRelayRuntime, relay::RelayAgentProfile};
5use crate::config::Config;
6use crate::okr::{ApprovalDecision, KeyResult, Okr, OkrRepository, OkrRun};
7use crate::provider::{ContentPart, Message, Role};
8use crate::session::Session;
9use anyhow::Result;
10use serde::{Deserialize, Serialize, de::DeserializeOwned};
11use std::collections::HashMap;
12use std::io::Write;
13use uuid::Uuid;
14
// Upper bound on the relay team size accepted from `/autochat [count] <task>`.
const AUTOCHAT_MAX_AGENTS: usize = 100;
// Team size used when the user does not pass an explicit count.
const AUTOCHAT_DEFAULT_AGENTS: usize = 3;
// Maximum number of full relay rounds before the relay loop stops.
const AUTOCHAT_MAX_ROUNDS: usize = 3;
// Cap on extra specialists added mid-relay — presumably checked against
// `dynamic_spawn_count` further down; use site not visible here (TODO confirm).
const AUTOCHAT_MAX_DYNAMIC_SPAWNS: usize = 3;
// Minimum handoff length (chars) before a spawn check is considered —
// use site not visible in this chunk (TODO confirm).
const AUTOCHAT_SPAWN_CHECK_MIN_CHARS: usize = 800;
// Fallback task used when `/go` / `/autochat` is invoked without a task string.
const AUTOCHAT_QUICK_DEMO_TASK: &str = "Self-organize into the right specialties for this task, then relay one concrete implementation plan with clear next handoffs.";
// Model forced for the easy `/go` path unless a CLI `--model` overrides it.
const GO_DEFAULT_MODEL: &str = "minimax-credits/MiniMax-M2.5-highspeed";
22
23fn parse_uuid_guarded(s: &str, context: &str) -> Option<Uuid> {
26 match s.parse::<Uuid>() {
27 Ok(uuid) => Some(uuid),
28 Err(e) => {
29 tracing::warn!(
30 context,
31 uuid_str = %s,
32 error = %e,
33 "Invalid UUID string - skipping operation"
34 );
35 None
36 }
37 }
38}
39
/// Runtime description of one relay agent: its bus handle, the system
/// prompt it is seeded with, and the capability tags it advertises.
#[derive(Debug, Clone)]
struct RelayProfile {
    /// Agent handle used on the bus (e.g. "auto-backend").
    name: String,
    /// System-prompt text injected as the session's first message.
    instructions: String,
    /// Skill tags registered with the protocol relay runtime.
    capabilities: Vec<String>,
}
46
/// One agent profile as proposed by the planner model. Every field is
/// `#[serde(default)]` so partial or sloppy model JSON still deserializes.
#[derive(Debug, Clone, Deserialize)]
struct PlannedRelayProfile {
    /// Suggested agent name (sanitized before use).
    #[serde(default)]
    name: String,
    /// Specialty tag; blank falls back to "generalist" downstream.
    #[serde(default)]
    specialty: String,
    /// One-line mission statement; blank gets a canned default downstream.
    #[serde(default)]
    mission: String,
    /// Free-form skill tags (slugified and deduplicated downstream).
    #[serde(default)]
    capabilities: Vec<String>,
}
58
/// Top-level planner response shape: `{ "profiles": [...] }`.
#[derive(Debug, Clone, Deserialize)]
struct PlannedRelayResponse {
    /// Proposed team; defaults to empty when the key is missing.
    #[serde(default)]
    profiles: Vec<PlannedRelayProfile>,
}
64
/// Scaling-controller verdict on whether to add one more specialist mid-relay.
#[derive(Debug, Clone, Deserialize)]
struct RelaySpawnDecision {
    /// True when the model wants an extra agent spawned this round.
    #[serde(default)]
    spawn: bool,
    /// Free-text justification; a canned reason is substituted if blank.
    #[serde(default)]
    reason: String,
    /// Profile for the new agent; may be null/omitted when `spawn` is false.
    #[serde(default)]
    profile: Option<PlannedRelayProfile>,
}
74
/// One model-drafted key result (quantitative target + unit).
#[derive(Debug, Clone, Deserialize)]
struct PlannedOkrKeyResult {
    /// KR title; untitled entries are skipped downstream.
    #[serde(default)]
    title: String,
    /// Numeric target; clamped to >= 0 downstream.
    #[serde(default)]
    target_value: f64,
    /// Measurement unit; defaults to "%" when missing or blank.
    #[serde(default = "default_okr_unit")]
    unit: String,
}
84
/// Model-drafted OKR: one objective plus its key results.
#[derive(Debug, Clone, Deserialize)]
struct PlannedOkrDraft {
    /// Objective title; blank falls back to a task-derived title downstream.
    #[serde(default)]
    title: String,
    /// Objective description; blank falls back to a task-derived one.
    #[serde(default)]
    description: String,
    /// Drafted key results (at most 7 are kept downstream).
    #[serde(default)]
    key_results: Vec<PlannedOkrKeyResult>,
}
94
/// Default measurement unit for key results ("%"), used by serde when
/// the planner omits or blanks the `unit` field.
fn default_okr_unit() -> String {
    String::from("%")
}
98
/// JSON-serializable summary of a relay run, printed for `--format json`.
#[derive(Debug, Serialize)]
struct AutochatCliResult {
    /// Terminal status string (e.g. "max_rounds_reached", "agent_error").
    status: String,
    /// Identifier of this relay run.
    relay_id: String,
    /// Model reference the relay ran with.
    model: String,
    /// Number of agents in the team.
    agent_count: usize,
    /// Total prompt turns executed.
    turns: usize,
    /// Agent names in relay order.
    agents: Vec<String>,
    /// Text of the last handoff produced.
    final_handoff: String,
    /// Human-readable summary for plain-text output.
    summary: String,
    /// Failure detail, if the relay terminated abnormally.
    failure: Option<String>,
}
111
/// Lower-case `value` and collapse every run of non-alphanumeric
/// characters into a single `-`, trimming dashes from both ends.
fn slugify_label(value: &str) -> String {
    let mut slug = String::with_capacity(value.len());

    for ch in value.chars().map(|c| c.to_ascii_lowercase()) {
        if ch.is_ascii_alphanumeric() {
            slug.push(ch);
        } else if !slug.ends_with('-') {
            // First char of a separator run (also covers a leading run,
            // since an empty slug never ends with '-').
            slug.push('-');
        }
    }

    slug.trim_matches('-').to_string()
}
129
130fn sanitize_relay_agent_name(value: &str) -> String {
131 let raw = slugify_label(value);
132 let base = if raw.is_empty() {
133 "auto-specialist".to_string()
134 } else if raw.starts_with("auto-") {
135 raw
136 } else {
137 format!("auto-{raw}")
138 };
139
140 truncate_with_ellipsis(&base, 48)
141 .trim_end_matches("...")
142 .to_string()
143}
144
/// Return `base` if free, otherwise the first of `base-2`, `base-3`, ...
/// not already present in `existing`.
fn unique_relay_agent_name(base: &str, existing: &[String]) -> String {
    let taken = |candidate: &str| existing.iter().any(|name| name == candidate);

    if !taken(base) {
        return base.to_string();
    }

    (2usize..)
        .map(|suffix| format!("{base}-{suffix}"))
        .find(|candidate| !taken(candidate))
        .expect("unbounded suffix range always yields a free name")
}
159
/// Build the system prompt for a planner-designed relay agent: a
/// per-agent header (name/specialty/mission) followed by the shared
/// protocol-first relay policy.
fn relay_instruction_from_plan(name: &str, specialty: &str, mission: &str) -> String {
    let header = format!("You are @{name}.\nSpecialty: {specialty}.\nMission: {mission}");
    let policy = concat!(
        "This is a protocol-first relay conversation. Treat incoming handoffs as authoritative context.\n",
        "Keep responses concise, concrete, and useful for the next specialist.\n",
        "Include one clear recommendation for what the next agent should do.\n",
        "If the task is too large for the current team, explicitly call out missing specialties and handoff boundaries."
    );
    format!("{header}\n\n{policy}")
}
171
172fn build_runtime_profile_from_plan(
173 profile: PlannedRelayProfile,
174 existing: &[String],
175) -> Option<RelayProfile> {
176 let specialty = if profile.specialty.trim().is_empty() {
177 "generalist".to_string()
178 } else {
179 profile.specialty.trim().to_string()
180 };
181
182 let mission = if profile.mission.trim().is_empty() {
183 "Advance the relay with concrete next actions and clear handoffs.".to_string()
184 } else {
185 profile.mission.trim().to_string()
186 };
187
188 let base_name = if profile.name.trim().is_empty() {
189 format!("auto-{}", slugify_label(&specialty))
190 } else {
191 profile.name.trim().to_string()
192 };
193
194 let sanitized = sanitize_relay_agent_name(&base_name);
195 let name = unique_relay_agent_name(&sanitized, existing);
196 if name.trim().is_empty() {
197 return None;
198 }
199
200 let mut capabilities: Vec<String> = Vec::new();
201 let specialty_cap = slugify_label(&specialty);
202 if !specialty_cap.is_empty() {
203 capabilities.push(specialty_cap);
204 }
205
206 for capability in profile.capabilities {
207 let normalized = slugify_label(&capability);
208 if !normalized.is_empty() && !capabilities.contains(&normalized) {
209 capabilities.push(normalized);
210 }
211 }
212
213 for required in ["relay", "context-handoff", "autochat"] {
214 if !capabilities.iter().any(|capability| capability == required) {
215 capabilities.push(required.to_string());
216 }
217 }
218
219 Some(RelayProfile {
220 name: name.clone(),
221 instructions: relay_instruction_from_plan(&name, &specialty, &mission),
222 capabilities,
223 })
224}
225
226fn extract_json_payload<T: DeserializeOwned>(text: &str) -> Option<T> {
227 let trimmed = text.trim();
228 if let Ok(value) = serde_json::from_str::<T>(trimmed) {
229 return Some(value);
230 }
231
232 if let (Some(start), Some(end)) = (trimmed.find('{'), trimmed.rfind('}'))
233 && start < end
234 && let Ok(value) = serde_json::from_str::<T>(&trimmed[start..=end])
235 {
236 return Some(value);
237 }
238
239 if let (Some(start), Some(end)) = (trimmed.find('['), trimmed.rfind(']'))
240 && start < end
241 && let Ok(value) = serde_json::from_str::<T>(&trimmed[start..=end])
242 {
243 return Some(value);
244 }
245
246 None
247}
248
249fn resolve_provider_for_model_autochat(
250 registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
251 model_ref: &str,
252) -> Option<(std::sync::Arc<dyn crate::provider::Provider>, String)> {
253 let (provider_name, model_name) = crate::provider::parse_model_string(model_ref);
254 if let Some(provider_name) = provider_name
255 && let Some(provider) = registry.get(provider_name)
256 {
257 return Some((provider, model_name.to_string()));
258 }
259
260 let fallbacks = [
261 "zai",
262 "openai",
263 "github-copilot",
264 "anthropic",
265 "openrouter",
266 "novita",
267 "moonshotai",
268 "google",
269 ];
270
271 for provider_name in fallbacks {
272 if let Some(provider) = registry.get(provider_name) {
273 return Some((provider, model_ref.to_string()));
274 }
275 }
276
277 registry
278 .list()
279 .first()
280 .copied()
281 .and_then(|name| registry.get(name))
282 .map(|provider| (provider, model_ref.to_string()))
283}
284
285fn default_relay_okr_template(okr_id: Uuid, task: &str) -> Okr {
286 let mut okr = Okr::new(
287 format!("Relay: {}", truncate_with_ellipsis(task, 60)),
288 format!("Execute relay task: {}", task),
289 );
290 okr.id = okr_id;
291
292 okr.add_key_result(KeyResult::new(
294 okr_id,
295 "Relay completes all rounds",
296 100.0,
297 "%",
298 ));
299 okr.add_key_result(KeyResult::new(
300 okr_id,
301 "Team produces actionable handoff",
302 1.0,
303 "count",
304 ));
305 okr.add_key_result(KeyResult::new(okr_id, "No critical errors", 0.0, "count"));
306 okr
307}
308
309fn okr_from_planned_draft(okr_id: Uuid, task: &str, planned: PlannedOkrDraft) -> Okr {
310 let title = if planned.title.trim().is_empty() {
311 format!("Relay: {}", truncate_with_ellipsis(task, 60))
312 } else {
313 planned.title.trim().to_string()
314 };
315
316 let description = if planned.description.trim().is_empty() {
317 format!("Execute relay task: {}", task)
318 } else {
319 planned.description.trim().to_string()
320 };
321
322 let mut okr = Okr::new(title, description);
323 okr.id = okr_id;
324
325 for kr in planned.key_results.into_iter().take(7) {
327 if kr.title.trim().is_empty() {
328 continue;
329 }
330 let unit = if kr.unit.trim().is_empty() {
331 default_okr_unit()
332 } else {
333 kr.unit
334 };
335 okr.add_key_result(KeyResult::new(
336 okr_id,
337 kr.title.trim().to_string(),
338 kr.target_value.max(0.0),
339 unit,
340 ));
341 }
342
343 if okr.key_results.is_empty() {
344 default_relay_okr_template(okr_id, task)
345 } else {
346 okr
347 }
348}
349
350async fn plan_okr_draft_with_registry(
351 task: &str,
352 model_ref: &str,
353 agent_count: usize,
354 registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
355) -> Option<PlannedOkrDraft> {
356 let (provider, model_name) = resolve_provider_for_model_autochat(registry, model_ref)?;
357
358 let request = crate::provider::CompletionRequest {
360 model: model_name,
361 messages: vec![
362 crate::provider::Message {
363 role: crate::provider::Role::System,
364 content: vec![crate::provider::ContentPart::Text {
365 text: "You write OKRs for execution governance. Return ONLY valid JSON."
366 .to_string(),
367 }],
368 },
369 crate::provider::Message {
370 role: crate::provider::Role::User,
371 content: vec![crate::provider::ContentPart::Text {
372 text: format!(
373 "Task:\n{task}\n\nTeam size: {agent_count}\n\n\
374 Propose ONE objective and 3-7 measurable key results for executing this task via an AI relay.\n\
375 Key results must be quantitative (numeric target_value + unit).\n\n\
376 Return JSON ONLY (no markdown):\n\
377 {{\n \"title\": \"...\",\n \"description\": \"...\",\n \"key_results\": [\n {{\"title\":\"...\",\"target_value\":123,\"unit\":\"%|count|tests|files|items\"}}\n ]\n}}\n\n\
378 Rules:\n\
379 - Avoid vague KRs like 'do better'\n\
380 - Prefer engineering outcomes (tests passing, endpoints implemented, docs updated, errors=0)\n\
381 - If unsure about a unit, use 'count'"
382 ),
383 }],
384 },
385 ],
386 tools: Vec::new(),
387 temperature: Some(0.4),
388 top_p: Some(0.9),
389 max_tokens: Some(900),
390 stop: Vec::new(),
391 };
392
393 let response = provider.complete(request).await.ok()?;
394 let text = response
395 .message
396 .content
397 .iter()
398 .filter_map(|part| match part {
399 crate::provider::ContentPart::Text { text }
400 | crate::provider::ContentPart::Thinking { text } => Some(text.as_str()),
401 _ => None,
402 })
403 .collect::<Vec<_>>()
404 .join("\n");
405
406 extract_json_payload::<PlannedOkrDraft>(&text)
407}
408
409async fn plan_relay_profiles_with_registry(
410 task: &str,
411 model_ref: &str,
412 requested_agents: usize,
413 registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
414) -> Option<Vec<RelayProfile>> {
415 let (provider, model_name) = resolve_provider_for_model_autochat(registry, model_ref)?;
416 let requested_agents = requested_agents.clamp(2, AUTOCHAT_MAX_AGENTS);
417
418 let request = crate::provider::CompletionRequest {
419 model: model_name,
420 messages: vec![
421 crate::provider::Message {
422 role: crate::provider::Role::System,
423 content: vec![crate::provider::ContentPart::Text {
424 text: "You are a relay-team architect. Return ONLY valid JSON.".to_string(),
425 }],
426 },
427 crate::provider::Message {
428 role: crate::provider::Role::User,
429 content: vec![crate::provider::ContentPart::Text {
430 text: format!(
431 "Task:\n{task}\n\nDesign a task-specific relay team.\n\
432 Respond with JSON object only:\n\
433 {{\n \"profiles\": [\n {{\"name\":\"auto-...\",\"specialty\":\"...\",\"mission\":\"...\",\"capabilities\":[\"...\"]}}\n ]\n}}\n\
434 Requirements:\n\
435 - Return {} profiles\n\
436 - Names must be short kebab-case\n\
437 - Capabilities must be concise skill tags\n\
438 - Missions should be concrete and handoff-friendly",
439 requested_agents
440 ),
441 }],
442 },
443 ],
444 tools: Vec::new(),
445 temperature: Some(1.0),
446 top_p: Some(0.9),
447 max_tokens: Some(1200),
448 stop: Vec::new(),
449 };
450
451 let response = provider.complete(request).await.ok()?;
452 let text = response
453 .message
454 .content
455 .iter()
456 .filter_map(|part| match part {
457 crate::provider::ContentPart::Text { text }
458 | crate::provider::ContentPart::Thinking { text } => Some(text.as_str()),
459 _ => None,
460 })
461 .collect::<Vec<_>>()
462 .join("\n");
463
464 let planned = extract_json_payload::<PlannedRelayResponse>(&text)?;
465 let mut existing = Vec::<String>::new();
466 let mut runtime = Vec::<RelayProfile>::new();
467
468 for profile in planned.profiles.into_iter().take(AUTOCHAT_MAX_AGENTS) {
469 if let Some(runtime_profile) = build_runtime_profile_from_plan(profile, &existing) {
470 existing.push(runtime_profile.name.clone());
471 runtime.push(runtime_profile);
472 }
473 }
474
475 if runtime.len() >= 2 {
476 Some(runtime)
477 } else {
478 None
479 }
480}
481
482async fn decide_dynamic_spawn_with_registry(
483 task: &str,
484 model_ref: &str,
485 latest_output: &str,
486 round: usize,
487 ordered_agents: &[String],
488 registry: &std::sync::Arc<crate::provider::ProviderRegistry>,
489) -> Option<(RelayProfile, String)> {
490 let (provider, model_name) = resolve_provider_for_model_autochat(registry, model_ref)?;
491 let team = ordered_agents
492 .iter()
493 .map(|name| format!("@{name}"))
494 .collect::<Vec<_>>()
495 .join(", ");
496 let output_excerpt = truncate_with_ellipsis(latest_output, 2200);
497
498 let request = crate::provider::CompletionRequest {
499 model: model_name,
500 messages: vec![
501 crate::provider::Message {
502 role: crate::provider::Role::System,
503 content: vec![crate::provider::ContentPart::Text {
504 text: "You are a relay scaling controller. Return ONLY valid JSON.".to_string(),
505 }],
506 },
507 crate::provider::Message {
508 role: crate::provider::Role::User,
509 content: vec![crate::provider::ContentPart::Text {
510 text: format!(
511 "Task:\n{task}\n\nRound: {round}\nCurrent team: {team}\n\
512 Latest handoff excerpt:\n{output_excerpt}\n\n\
513 Decide whether the team needs one additional specialist right now.\n\
514 Respond with JSON object only:\n\
515 {{\n \"spawn\": true|false,\n \"reason\": \"...\",\n \"profile\": {{\"name\":\"auto-...\",\"specialty\":\"...\",\"mission\":\"...\",\"capabilities\":[\"...\"]}}\n}}\n\
516 If spawn=false, profile may be null or omitted."
517 ),
518 }],
519 },
520 ],
521 tools: Vec::new(),
522 temperature: Some(1.0),
523 top_p: Some(0.9),
524 max_tokens: Some(420),
525 stop: Vec::new(),
526 };
527
528 let response = provider.complete(request).await.ok()?;
529 let text = response
530 .message
531 .content
532 .iter()
533 .filter_map(|part| match part {
534 crate::provider::ContentPart::Text { text }
535 | crate::provider::ContentPart::Thinking { text } => Some(text.as_str()),
536 _ => None,
537 })
538 .collect::<Vec<_>>()
539 .join("\n");
540
541 let decision = extract_json_payload::<RelaySpawnDecision>(&text)?;
542 if !decision.spawn {
543 return None;
544 }
545
546 let profile = decision.profile?;
547 let runtime_profile = build_runtime_profile_from_plan(profile, ordered_agents)?;
548 let reason = if decision.reason.trim().is_empty() {
549 "Model requested additional specialist for task scope.".to_string()
550 } else {
551 decision.reason.trim().to_string()
552 };
553
554 Some((runtime_profile, reason))
555}
556
/// Entry point for `codetether run`.
///
/// Three paths, in order:
/// 1. `/go` / `/team` (easy mode, normalized to `/autochat`): interactive
///    OKR draft + approval, then Ralph PRD execution.
/// 2. `/autochat [count] <task>`: protocol-first relay without OKR tracking.
/// 3. Anything else: a regular (new or continued) chat session prompt.
pub async fn execute(args: RunArgs) -> Result<()> {
    let message = args.message.trim();

    if message.is_empty() {
        anyhow::bail!("You must provide a message");
    }

    tracing::info!("Running with message: {}", message);

    // Config failures are non-fatal; defaults are fine for most paths.
    let config = Config::load().await.unwrap_or_default();

    // Detect `/go`-`/team` BEFORE normalization rewrites them to /autochat.
    let easy_go_requested = is_easy_go_command(message);
    let normalized = normalize_cli_go_command(message);
    if let Some(rest) = command_with_optional_args(&normalized, "/autochat") {
        let Some((agent_count, parsed_task)) = parse_autochat_args(rest) else {
            anyhow::bail!(
                "Usage: /autochat [count] <task>\nEasy mode: /go <task>\ncount range: 2-{} (default: {})",
                AUTOCHAT_MAX_AGENTS,
                AUTOCHAT_DEFAULT_AGENTS
            );
        };
        // Easy mode applies stricter validation (rejects pasted run logs).
        let task = if easy_go_requested {
            validate_easy_go_task(parsed_task)?
        } else {
            normalize_go_task_input(parsed_task)
        };

        if !(2..=AUTOCHAT_MAX_AGENTS).contains(&agent_count) {
            anyhow::bail!(
                "Invalid relay size {}. count must be between 2 and {}",
                agent_count,
                AUTOCHAT_MAX_AGENTS
            );
        }

        // Precedence: CLI flag > /go default > env var > config > fallback.
        let model = resolve_autochat_model(
            args.model.as_deref(),
            std::env::var("CODETETHER_DEFAULT_MODEL").ok().as_deref(),
            config.default_model.as_deref(),
            easy_go_requested,
        );

        if easy_go_requested {
            // An explicit leading count caps concurrency; otherwise allow
            // up to the maximum team size.
            let max_concurrent = if rest.trim().starts_with(|c: char| c.is_ascii_digit()) {
                agent_count
            } else {
                AUTOCHAT_MAX_AGENTS
            };
            let okr_id = Uuid::new_v4();
            // Registry is optional here: without it we fall back to the
            // template OKR instead of a model-drafted one.
            let registry_for_draft = crate::provider::ProviderRegistry::from_vault()
                .await
                .ok()
                .map(std::sync::Arc::new);

            let (mut okr, draft_note) = if let Some(registry) = &registry_for_draft {
                match plan_okr_draft_with_registry(&task, &model, agent_count, registry).await {
                    Some(planned) => (okr_from_planned_draft(okr_id, &task, planned), None),
                    None => (
                        default_relay_okr_template(okr_id, &task),
                        Some("(OKR: fallback template — model draft parse failed)".to_string()),
                    ),
                }
            } else {
                (
                    default_relay_okr_template(okr_id, &task),
                    Some("(OKR: fallback template — provider unavailable)".to_string()),
                )
            };

            let mut run = OkrRun::new(
                okr_id,
                format!("Run {}", chrono::Local::now().format("%Y-%m-%d %H:%M")),
            );
            // Best-effort state transition; errors are intentionally ignored.
            let _ = run.submit_for_approval();

            // --- Interactive OKR review ---
            println!("\n⚠️ /go OKR Draft\n");
            println!("Task: {}", truncate_with_ellipsis(&task, 80));
            println!("Agents: {} | Model: {}", agent_count, model);
            if let Some(note) = draft_note {
                println!("{}", note);
            }
            println!("\nObjective: {}", okr.title);
            println!("\nKey Results:");
            for kr in &okr.key_results {
                println!(" • {} (target: {} {})", kr.title, kr.target_value, kr.unit);
            }
            println!("\n");

            print!("Approve OKR and start relay? [y/n]: ");
            // Flush so the prompt appears before we block on stdin.
            std::io::stdout().flush()?;
            let mut input = String::new();
            std::io::stdin().read_line(&mut input)?;

            let input = input.trim().to_lowercase();
            if input != "y" && input != "yes" {
                run.record_decision(ApprovalDecision::deny(run.id, "User denied via CLI"));
                println!("❌ OKR denied. Relay not started.");
                println!("Use /autochat for tactical execution without OKR tracking.");
                return Ok(());
            }

            println!("✅ OKR approved! Starting Ralph PRD execution...\n");

            let mut approved_run = run;
            // Persist OKR + approved run; persistence failures are
            // tolerated (the run still executes).
            if let Ok(repo) = OkrRepository::from_config().await {
                let _ = repo.create_okr(okr.clone()).await;
                approved_run.record_decision(ApprovalDecision::approve(
                    approved_run.id,
                    "User approved via CLI",
                ));
                approved_run.correlation_id = Some(format!("ralph-{}", Uuid::new_v4()));
                let _ = repo.create_run(approved_run.clone()).await;
                tracing::info!(okr_id = %okr_id, okr_run_id = %approved_run.id, "OKR run approved and saved");
            }

            // Reuse the draft registry when we have it; otherwise this
            // second attempt must succeed for execution to proceed.
            let registry = if let Some(registry) = registry_for_draft {
                registry
            } else {
                std::sync::Arc::new(crate::provider::ProviderRegistry::from_vault().await?)
            };
            let (provider, resolved_model) = resolve_provider_for_model_autochat(&registry, &model)
                .ok_or_else(|| anyhow::anyhow!("No provider available for model '{model}'"))?;

            let bus = crate::bus::AgentBus::new().into_arc();
            crate::bus::s3_sink::spawn_bus_s3_sink(bus.clone());

            let ralph_result = super::go_ralph::execute_go_ralph(
                &task,
                &mut okr,
                &mut approved_run,
                provider,
                &resolved_model,
                10, // presumably the iteration cap — confirm against execute_go_ralph's signature
                Some(bus),
                max_concurrent,
                Some(registry.clone()),
            )
            .await?;

            // Persist final run state (again best-effort).
            if let Ok(repo) = OkrRepository::from_config().await {
                let _ = repo.update_run(approved_run).await;
            }

            match args.format.as_str() {
                "json" => println!(
                    "{}",
                    serde_json::to_string_pretty(&serde_json::json!({
                        "passed": ralph_result.passed,
                        "total": ralph_result.total,
                        "all_passed": ralph_result.all_passed,
                        "iterations": ralph_result.iterations,
                        "feature_branch": ralph_result.feature_branch,
                        "prd_path": ralph_result.prd_path.display().to_string(),
                        "status": format!("{:?}", ralph_result.status),
                        "stories": ralph_result.stories.iter().map(|s| serde_json::json!({
                            "id": s.id,
                            "title": s.title,
                            "passed": s.passed,
                        })).collect::<Vec<_>>(),
                    }))?
                ),
                _ => {
                    println!(
                        "{}",
                        super::go_ralph::format_go_ralph_result(&ralph_result, &task)
                    );
                }
            }
            return Ok(());
        }

        // --- Plain /autochat path: relay without OKR tracking ---
        let relay_result = run_protocol_first_relay(agent_count, &task, &model, None, None).await?;
        match args.format.as_str() {
            "json" => println!("{}", serde_json::to_string_pretty(&relay_result)?),
            _ => {
                println!("{}", relay_result.summary);
                if let Some(failure) = &relay_result.failure {
                    eprintln!("\nFailure detail: {}", failure);
                }
                eprintln!(
                    "\n[Relay: {} | Model: {}]",
                    relay_result.relay_id, relay_result.model
                );
            }
        }
        return Ok(());
    }

    // --- Regular session path ---
    let mut session = if let Some(session_id) = args.session.clone() {
        tracing::info!("Continuing session: {}", session_id);
        Session::load(&session_id).await?
    } else if args.continue_session {
        // `-c`: resume the most recent session for this working directory,
        // creating a fresh one if none exists.
        let workspace_dir = std::env::current_dir().unwrap_or_default();
        match Session::last_for_directory(Some(&workspace_dir)).await {
            Ok(s) => {
                tracing::info!(
                    session_id = %s.id,
                    workspace = %workspace_dir.display(),
                    "Continuing last workspace session"
                );
                s
            }
            Err(_) => {
                let s = Session::new().await?;
                tracing::info!(
                    session_id = %s.id,
                    workspace = %workspace_dir.display(),
                    "No workspace session found; created new session"
                );
                s
            }
        }
    } else {
        let s = Session::new().await?;
        tracing::info!("Created new session: {}", s.id);
        s
    };

    let model = args
        .model
        .or_else(|| std::env::var("CODETETHER_DEFAULT_MODEL").ok())
        .or(config.default_model);

    if let Some(model) = model {
        tracing::info!("Using model: {}", model);
        session.metadata.model = Some(model);
    }

    let bus = AgentBus::new().into_arc();
    crate::bus::s3_sink::spawn_bus_s3_sink(bus.clone());
    session.bus = Some(bus);

    let result = session.prompt(message).await?;

    match args.format.as_str() {
        "json" => {
            println!("{}", serde_json::to_string_pretty(&result)?);
        }
        _ => {
            println!("{}", result.text);
            // Session id goes to stderr so stdout stays pipeable.
            eprintln!(
                "\n[Session: {} | Continue with: codetether run -c \"...\"]",
                session.id
            );
        }
    }

    Ok(())
}
830
/// If `input` is `command` optionally followed by whitespace-separated
/// arguments, return the trimmed argument string (`""` when absent).
///
/// Returns `None` when `input` is a different command, or when `command`
/// is merely a prefix of a longer word (e.g. "/autochatx").
fn command_with_optional_args<'a>(input: &'a str, command: &str) -> Option<&'a str> {
    let rest = input.trim().strip_prefix(command)?;
    match rest.chars().next() {
        None => Some(""),
        Some(c) if c.is_whitespace() => Some(rest.trim()),
        Some(_) => None,
    }
}
846
847fn normalize_cli_go_command(input: &str) -> String {
848 let trimmed = input.trim();
849 if trimmed.is_empty() || !trimmed.starts_with('/') {
850 return trimmed.to_string();
851 }
852
853 let mut parts = trimmed.splitn(2, char::is_whitespace);
854 let command = parts.next().unwrap_or("");
855 let args = parts.next().unwrap_or("").trim();
856
857 match command.to_ascii_lowercase().as_str() {
858 "/go" | "/team" => {
859 if args.is_empty() {
860 format!(
861 "/autochat {} {}",
862 AUTOCHAT_DEFAULT_AGENTS, AUTOCHAT_QUICK_DEMO_TASK
863 )
864 } else {
865 let mut count_and_task = args.splitn(2, char::is_whitespace);
866 let first = count_and_task.next().unwrap_or("");
867 if let Ok(count) = first.parse::<usize>() {
868 let task = count_and_task.next().unwrap_or("").trim();
869 if task.is_empty() {
870 format!("/autochat {count} {AUTOCHAT_QUICK_DEMO_TASK}")
871 } else {
872 format!("/autochat {count} {task}")
873 }
874 } else {
875 format!("/autochat {} {args}", AUTOCHAT_DEFAULT_AGENTS)
876 }
877 }
878 }
879 _ => trimmed.to_string(),
880 }
881}
882
/// True when the first whitespace-separated token of `input` is `/go`
/// or `/team`, case-insensitively.
fn is_easy_go_command(input: &str) -> bool {
    input.split_whitespace().next().is_some_and(|token| {
        let token = token.to_ascii_lowercase();
        token == "/go" || token == "/team"
    })
}
893
/// Collapse all interior whitespace runs in `task` to single spaces and
/// trim the ends.
fn normalize_go_task_input(task: &str) -> String {
    let mut normalized = String::with_capacity(task.len());
    for word in task.split_whitespace() {
        if !normalized.is_empty() {
            normalized.push(' ');
        }
        normalized.push_str(word);
    }
    normalized
}
897
/// Heuristic: does `task` look like pasted output/logs from a previous
/// run rather than a fresh objective? Trips when at least two report
/// markers appear, or when a long (>400 byte) text contains "next steps:".
fn looks_like_pasted_go_run_output(task: &str) -> bool {
    const MARKERS: [&str; 7] = [
        "progress:",
        "iterations:",
        "feature branch:",
        "stories:",
        "incomplete stories:",
        "next steps:",
        "assessment is done and documented",
    ];

    let lower = task.to_ascii_lowercase();
    let marker_hits = MARKERS
        .into_iter()
        .filter(|marker| lower.contains(marker))
        .count();

    marker_hits >= 2 || (task.len() > 400 && lower.contains("next steps:"))
}
913
914fn validate_easy_go_task(task: &str) -> Result<String> {
915 let normalized = normalize_go_task_input(task);
916 if normalized.is_empty() {
917 anyhow::bail!(
918 "`/go` requires a task. Example: /go implement /v1/agent compatibility routes"
919 );
920 }
921
922 if looks_like_pasted_go_run_output(&normalized) {
923 anyhow::bail!(
924 "`/go` received text that looks like prior run output/logs. \
925Use a concise objective sentence instead, e.g. `/go implement /v1/agent/* compatibility routes`."
926 );
927 }
928
929 Ok(normalized)
930}
931
932fn parse_autochat_args(rest: &str) -> Option<(usize, &str)> {
933 let rest = rest.trim();
934 if rest.is_empty() {
935 return None;
936 }
937
938 let mut parts = rest.splitn(2, char::is_whitespace);
939 let first = parts.next().unwrap_or("").trim();
940 if first.is_empty() {
941 return None;
942 }
943
944 if let Ok(count) = first.parse::<usize>() {
945 let task = parts.next().unwrap_or("").trim();
946 if task.is_empty() {
947 Some((count, AUTOCHAT_QUICK_DEMO_TASK))
948 } else {
949 Some((count, task))
950 }
951 } else {
952 Some((AUTOCHAT_DEFAULT_AGENTS, rest))
953 }
954}
955
956fn resolve_autochat_model(
957 cli_model: Option<&str>,
958 env_model: Option<&str>,
959 config_model: Option<&str>,
960 easy_go_requested: bool,
961) -> String {
962 if let Some(model) = cli_model.filter(|value| !value.trim().is_empty()) {
963 return model.to_string();
964 }
965 if easy_go_requested {
966 return GO_DEFAULT_MODEL.to_string();
967 }
968 if let Some(model) = env_model.filter(|value| !value.trim().is_empty()) {
969 return model.to_string();
970 }
971 if let Some(model) = config_model.filter(|value| !value.trim().is_empty()) {
972 return model.to_string();
973 }
974 "zai/glm-5".to_string()
975}
976
977fn build_relay_profiles(count: usize) -> Vec<RelayProfile> {
978 let mut profiles = Vec::with_capacity(count);
979 for idx in 0..count {
980 let name = format!("auto-agent-{}", idx + 1);
981
982 let instructions = format!(
983 "You are @{name}.\n\
984 Role policy: self-organize from task context and current handoff instead of assuming a fixed persona.\n\
985 Mission: advance the relay with concrete, high-signal next actions and clear ownership boundaries.\n\n\
986 This is a protocol-first relay conversation. Treat the incoming handoff as authoritative context.\n\
987 Keep your response concise, concrete, and useful for the next specialist.\n\
988 Include one clear recommendation for what the next agent should do.\n\
989 If the task scope is too large, explicitly call out missing specialties and handoff boundaries.",
990 );
991 let capabilities = vec![
992 "generalist".to_string(),
993 "self-organizing".to_string(),
994 "relay".to_string(),
995 "context-handoff".to_string(),
996 "autochat".to_string(),
997 ];
998
999 profiles.push(RelayProfile {
1000 name,
1001 instructions,
1002 capabilities,
1003 });
1004 }
1005 profiles
1006}
1007
/// Truncate `value` to at most `max_chars` characters, appending "..."
/// only when something was actually cut. `max_chars == 0` yields "".
fn truncate_with_ellipsis(value: &str, max_chars: usize) -> String {
    if max_chars == 0 {
        return String::new();
    }

    let prefix: String = value.chars().take(max_chars).collect();
    // An extra character past the cap means we actually truncated.
    if value.chars().nth(max_chars).is_some() {
        format!("{prefix}...")
    } else {
        prefix
    }
}
1029
/// Reduce `text` to a compact fingerprint for convergence detection:
/// lowercased ASCII alphanumerics with whitespace runs collapsed to a
/// single space, other characters dropped, capped at ~280 bytes, trimmed.
fn normalize_for_convergence(text: &str) -> String {
    let mut fingerprint = String::with_capacity(text.len().min(512));
    let mut squelch_spaces = false;

    for ch in text.chars() {
        match ch {
            c if c.is_ascii_alphanumeric() => {
                fingerprint.push(c.to_ascii_lowercase());
                squelch_spaces = false;
            }
            c if c.is_whitespace() && !squelch_spaces => {
                fingerprint.push(' ');
                squelch_spaces = true;
            }
            // Punctuation and other symbols are dropped without touching
            // the space-collapsing state.
            _ => {}
        }

        // 280 bytes is plenty for an equality fingerprint.
        if fingerprint.len() >= 280 {
            break;
        }
    }

    fingerprint.trim().to_string()
}
1050
1051async fn run_protocol_first_relay(
1052 agent_count: usize,
1053 task: &str,
1054 model_ref: &str,
1055 okr_id: Option<Uuid>,
1056 okr_run_id: Option<Uuid>,
1057) -> Result<AutochatCliResult> {
1058 let bus = AgentBus::new().into_arc();
1059
1060 crate::bus::s3_sink::spawn_bus_s3_sink(bus.clone());
1062
1063 let relay = ProtocolRelayRuntime::new(bus.clone());
1064
1065 let registry = crate::provider::ProviderRegistry::from_vault()
1066 .await
1067 .ok()
1068 .map(std::sync::Arc::new);
1069
1070 let mut planner_used = false;
1071 let profiles = if let Some(registry) = ®istry {
1072 if let Some(planned) =
1073 plan_relay_profiles_with_registry(task, model_ref, agent_count, registry).await
1074 {
1075 planner_used = true;
1076 planned
1077 } else {
1078 build_relay_profiles(agent_count)
1079 }
1080 } else {
1081 build_relay_profiles(agent_count)
1082 };
1083
1084 let relay_profiles: Vec<RelayAgentProfile> = profiles
1085 .iter()
1086 .map(|profile| RelayAgentProfile {
1087 name: profile.name.clone(),
1088 capabilities: profile.capabilities.clone(),
1089 })
1090 .collect();
1091
1092 let mut ordered_agents: Vec<String> = profiles
1093 .iter()
1094 .map(|profile| profile.name.clone())
1095 .collect();
1096 let mut sessions: HashMap<String, Session> = HashMap::new();
1097
1098 for profile in &profiles {
1099 let mut session = Session::new().await?;
1100 session.metadata.model = Some(model_ref.to_string());
1101 session.agent = profile.name.clone();
1102 session.bus = Some(bus.clone());
1103 session.add_message(Message {
1104 role: Role::System,
1105 content: vec![ContentPart::Text {
1106 text: profile.instructions.clone(),
1107 }],
1108 });
1109 sessions.insert(profile.name.clone(), session);
1110 }
1111
1112 if ordered_agents.len() < 2 {
1113 anyhow::bail!("Autochat needs at least 2 agents to relay.");
1114 }
1115
1116 relay.register_agents(&relay_profiles);
1117
1118 let kr_targets: std::collections::HashMap<String, f64> =
1120 if let (Some(okr_id_val), Some(_run_id)) = (okr_id, okr_run_id) {
1121 if let Ok(repo) = crate::okr::persistence::OkrRepository::from_config().await {
1122 if let Ok(Some(okr)) = repo.get_okr(okr_id_val).await {
1123 okr.key_results
1124 .iter()
1125 .map(|kr| (kr.id.to_string(), kr.target_value))
1126 .collect()
1127 } else {
1128 std::collections::HashMap::new()
1129 }
1130 } else {
1131 std::collections::HashMap::new()
1132 }
1133 } else {
1134 std::collections::HashMap::new()
1135 };
1136
1137 let mut kr_progress: std::collections::HashMap<String, f64> = std::collections::HashMap::new();
1138
1139 let mut baton = format!(
1140 "Task:\n{task}\n\nStart by proposing an execution strategy and one immediate next step."
1141 );
1142 let mut previous_normalized: Option<String> = None;
1143 let mut convergence_hits = 0usize;
1144 let mut turns = 0usize;
1145 let mut dynamic_spawn_count = 0usize;
1146 let mut status = "max_rounds_reached".to_string();
1147 let mut failure_note: Option<String> = None;
1148
1149 'relay_loop: for round in 1..=AUTOCHAT_MAX_ROUNDS {
1150 let mut idx = 0usize;
1151 while idx < ordered_agents.len() {
1152 let to = ordered_agents[idx].clone();
1153 let from = if idx == 0 {
1154 if round == 1 {
1155 "user".to_string()
1156 } else {
1157 ordered_agents[ordered_agents.len() - 1].clone()
1158 }
1159 } else {
1160 ordered_agents[idx - 1].clone()
1161 };
1162
1163 turns += 1;
1164 relay.send_handoff(&from, &to, &baton);
1165
1166 let Some(mut session) = sessions.remove(&to) else {
1167 status = "agent_error".to_string();
1168 failure_note = Some(format!("Relay agent @{to} session was unavailable."));
1169 break 'relay_loop;
1170 };
1171
1172 let output = match session.prompt(&baton).await {
1173 Ok(response) => response.text,
1174 Err(err) => {
1175 status = "agent_error".to_string();
1176 failure_note = Some(format!("Relay agent @{to} failed: {err}"));
1177 sessions.insert(to, session);
1178 break 'relay_loop;
1179 }
1180 };
1181
1182 sessions.insert(to.clone(), session);
1183
1184 let normalized = normalize_for_convergence(&output);
1185 if previous_normalized.as_deref() == Some(normalized.as_str()) {
1186 convergence_hits += 1;
1187 } else {
1188 convergence_hits = 0;
1189 }
1190 previous_normalized = Some(normalized);
1191
1192 baton = format!(
1193 "Relay task:\n{task}\n\nIncoming handoff from @{to}:\n{}\n\nContinue the work from this handoff. Keep your response focused and provide one concrete next-step instruction for the next agent.",
1194 truncate_with_ellipsis(&output, 3_500)
1195 );
1196
1197 if !kr_targets.is_empty() {
1199 let max_turns = ordered_agents.len() * AUTOCHAT_MAX_ROUNDS;
1200 let progress_ratio = (turns as f64 / max_turns as f64).min(1.0);
1201
1202 for (kr_id, target) in &kr_targets {
1203 let current = progress_ratio * target;
1204 let existing = kr_progress.get(kr_id).copied().unwrap_or(0.0);
1205 if current > existing {
1206 kr_progress.insert(kr_id.clone(), current);
1207 }
1208 }
1209
1210 if let Some(run_id) = okr_run_id {
1212 if let Ok(repo) = crate::okr::persistence::OkrRepository::from_config().await {
1213 if let Ok(Some(mut run)) = repo.get_run(run_id).await {
1214 if run.is_resumable() {
1215 run.iterations = turns as u32;
1216 for (kr_id, value) in &kr_progress {
1217 run.update_kr_progress(kr_id, *value);
1218 }
1219 run.status = crate::okr::OkrRunStatus::Running;
1220 let _ = repo.update_run(run).await;
1221 }
1222 }
1223 }
1224 }
1225 }
1226
1227 let can_attempt_spawn = dynamic_spawn_count < AUTOCHAT_MAX_DYNAMIC_SPAWNS
1228 && ordered_agents.len() < AUTOCHAT_MAX_AGENTS
1229 && output.len() >= AUTOCHAT_SPAWN_CHECK_MIN_CHARS;
1230
1231 if can_attempt_spawn
1232 && let Some(registry) = ®istry
1233 && let Some((profile, reason)) = decide_dynamic_spawn_with_registry(
1234 task,
1235 model_ref,
1236 &output,
1237 round,
1238 &ordered_agents,
1239 registry,
1240 )
1241 .await
1242 {
1243 match Session::new().await {
1244 Ok(mut spawned_session) => {
1245 spawned_session.metadata.model = Some(model_ref.to_string());
1246 spawned_session.agent = profile.name.clone();
1247 spawned_session.bus = Some(bus.clone());
1248 spawned_session.add_message(Message {
1249 role: Role::System,
1250 content: vec![ContentPart::Text {
1251 text: profile.instructions.clone(),
1252 }],
1253 });
1254
1255 relay.register_agents(&[RelayAgentProfile {
1256 name: profile.name.clone(),
1257 capabilities: profile.capabilities.clone(),
1258 }]);
1259
1260 ordered_agents.insert(idx + 1, profile.name.clone());
1261 sessions.insert(profile.name.clone(), spawned_session);
1262 dynamic_spawn_count += 1;
1263
1264 tracing::info!(
1265 agent = %profile.name,
1266 reason = %reason,
1267 "Dynamic relay spawn accepted"
1268 );
1269 }
1270 Err(err) => {
1271 tracing::warn!(
1272 agent = %profile.name,
1273 error = %err,
1274 "Dynamic relay spawn requested but failed"
1275 );
1276 }
1277 }
1278 }
1279
1280 if convergence_hits >= 2 {
1281 status = "converged".to_string();
1282 break 'relay_loop;
1283 }
1284
1285 idx += 1;
1286 }
1287 }
1288
1289 relay.shutdown_agents(&ordered_agents);
1290
1291 if let Some(run_id) = okr_run_id {
1293 if let Ok(repo) = crate::okr::persistence::OkrRepository::from_config().await {
1294 if let Ok(Some(mut run)) = repo.get_run(run_id).await {
1295 for (kr_id, value) in &kr_progress {
1297 run.update_kr_progress(kr_id, *value);
1298 }
1299
1300 let base_evidence = vec![
1302 format!("relay:{}", relay.relay_id()),
1303 format!("turns:{}", turns),
1304 format!("agents:{}", ordered_agents.len()),
1305 format!("status:{}", status),
1306 ];
1307
1308 let outcome_type = if status == "converged" {
1309 crate::okr::KrOutcomeType::FeatureDelivered
1310 } else {
1311 crate::okr::KrOutcomeType::Evidence
1312 };
1313
1314 for (kr_id_str, value) in &kr_progress {
1316 if let Some(kr_uuid) =
1318 parse_uuid_guarded(kr_id_str, "cli_relay_outcome_kr_link")
1319 {
1320 let kr_description = format!(
1321 "CLI relay outcome for KR {}: {} agents, {} turns, status={}",
1322 kr_id_str,
1323 ordered_agents.len(),
1324 turns,
1325 status
1326 );
1327 run.outcomes.push({
1328 let mut outcome = crate::okr::KrOutcome::new(kr_uuid, kr_description)
1329 .with_value(*value);
1330 outcome.run_id = Some(run.id);
1331 outcome.outcome_type = outcome_type;
1332 outcome.evidence = base_evidence.clone();
1333 outcome.source = "cli relay".to_string();
1334 outcome
1335 });
1336 }
1337 }
1338
1339 if status == "converged" {
1341 run.complete();
1342 } else if status == "agent_error" {
1343 run.status = crate::okr::OkrRunStatus::Failed;
1344 } else {
1345 run.status = crate::okr::OkrRunStatus::Completed;
1346 }
1347 let _ = repo.update_run(run).await;
1348 }
1349 }
1350 }
1351
1352 let mut summary = format!(
1353 "Autochat complete ({status}) — relay {} with {} agents over {} turns.\n\nFinal relay handoff:\n{}",
1354 relay.relay_id(),
1355 ordered_agents.len(),
1356 turns,
1357 truncate_with_ellipsis(&baton, 4_000)
1358 );
1359 if let Some(note) = &failure_note {
1360 summary.push_str(&format!("\n\nFailure detail: {note}"));
1361 }
1362 if planner_used {
1363 summary.push_str("\n\nTeam planning: model-organized profiles.");
1364 } else {
1365 summary.push_str("\n\nTeam planning: fallback self-organizing profiles.");
1366 }
1367 if dynamic_spawn_count > 0 {
1368 summary.push_str(&format!("\nDynamic relay spawns: {dynamic_spawn_count}"));
1369 }
1370
1371 Ok(AutochatCliResult {
1372 status,
1373 relay_id: relay.relay_id().to_string(),
1374 model: model_ref.to_string(),
1375 agent_count: ordered_agents.len(),
1376 turns,
1377 agents: ordered_agents,
1378 final_handoff: baton,
1379 summary,
1380 failure: failure_note,
1381 })
1382}
1383
#[cfg(test)]
mod tests {
    use super::{
        AUTOCHAT_DEFAULT_AGENTS, AUTOCHAT_QUICK_DEMO_TASK, GO_DEFAULT_MODEL, PlannedRelayProfile,
        PlannedRelayResponse, build_runtime_profile_from_plan, command_with_optional_args,
        extract_json_payload, is_easy_go_command, normalize_cli_go_command,
        normalize_go_task_input, parse_autochat_args, resolve_autochat_model,
        validate_easy_go_task,
    };

    // `/go <count> <task>` should be rewritten into the canonical `/autochat` form.
    #[test]
    fn normalize_go_maps_to_autochat_with_count_and_task() {
        assert_eq!(
            normalize_cli_go_command("/go 4 build protocol relay"),
            "/autochat 4 build protocol relay"
        );
    }

    // `/go <count>` with no task falls back to the built-in quick-demo task.
    #[test]
    fn normalize_go_count_only_uses_demo_task() {
        assert_eq!(
            normalize_cli_go_command("/go 4"),
            format!("/autochat 4 {AUTOCHAT_QUICK_DEMO_TASK}")
        );
    }

    // When no leading count is given, the default agent count applies.
    // Assert against AUTOCHAT_DEFAULT_AGENTS so this test tracks the constant
    // rather than a magic number.
    #[test]
    fn parse_autochat_args_supports_default_count() {
        assert_eq!(
            parse_autochat_args("build a relay").expect("valid args"),
            (AUTOCHAT_DEFAULT_AGENTS, "build a relay")
        );
    }

    // An explicit leading count overrides the default and is split off the task.
    #[test]
    fn parse_autochat_args_supports_explicit_count() {
        assert_eq!(
            parse_autochat_args("4 build a relay").expect("valid args"),
            (4, "build a relay")
        );
    }

    // Mixed whitespace (newlines, tabs, runs of spaces, padding) collapses to
    // single spaces with the ends trimmed.
    #[test]
    fn normalize_go_task_collapses_whitespace() {
        assert_eq!(
            normalize_go_task_input("  implement api\ncompat routes\twith tests "),
            "implement api compat routes with tests"
        );
    }

    // Guard against users pasting a previous run's status output back in as a task.
    #[test]
    fn validate_go_task_rejects_pasted_run_output() {
        let pasted =
            "Task: foo Progress: 0/7 stories Iterations: 7/10 Incomplete stories: ... Next steps:";
        assert!(validate_easy_go_task(pasted).is_err());
    }

    // "/autochatty" must not match the "/autochat" command prefix.
    #[test]
    fn command_with_optional_args_avoids_prefix_collision() {
        assert_eq!(command_with_optional_args("/autochatty", "/autochat"), None);
    }

    // `/go` and `/team` are easy-go aliases; the full `/autochat` form is not.
    #[test]
    fn easy_go_detection_handles_aliases() {
        assert!(is_easy_go_command("/go 4 task"));
        assert!(is_easy_go_command("/team 4 task"));
        assert!(!is_easy_go_command("/autochat 4 task"));
    }

    // Easy-go mode with no explicit or configured model resolves to the
    // documented default. Assert against GO_DEFAULT_MODEL so the test cannot
    // drift from the constant it is meant to verify.
    #[test]
    fn easy_go_defaults_to_minimax_when_model_not_set() {
        assert_eq!(
            resolve_autochat_model(None, None, Some("zai/glm-5"), true),
            GO_DEFAULT_MODEL
        );
    }

    // An explicitly supplied model always beats the easy-go default.
    #[test]
    fn explicit_model_wins_over_easy_go_default() {
        assert_eq!(
            resolve_autochat_model(Some("zai/glm-5"), None, None, true),
            "zai/glm-5"
        );
    }

    // JSON payloads wrapped in markdown code fences (plus surrounding prose)
    // should still deserialize.
    #[test]
    fn extract_json_payload_parses_markdown_wrapped_json() {
        let wrapped = "Here is the plan:\n```json\n{\"profiles\":[{\"name\":\"auto-db\",\"specialty\":\"database\",\"mission\":\"Own schema and queries\",\"capabilities\":[\"sql\",\"indexing\"]}]}\n```";
        let parsed: PlannedRelayResponse =
            extract_json_payload(wrapped).expect("should parse wrapped JSON");
        assert_eq!(parsed.profiles.len(), 1);
        assert_eq!(parsed.profiles[0].name, "auto-db");
    }

    // Planner-supplied names are slugified, deduplicated against existing
    // agents (suffix "-2"), and always receive the baseline relay capabilities.
    #[test]
    fn build_runtime_profile_normalizes_and_deduplicates_name() {
        let planned = PlannedRelayProfile {
            name: "Data Specialist".to_string(),
            specialty: "data engineering".to_string(),
            mission: "Prepare datasets for downstream coding".to_string(),
            capabilities: vec!["ETL".to_string(), "sql".to_string()],
        };

        let profile =
            build_runtime_profile_from_plan(planned, &["auto-data-specialist".to_string()])
                .expect("profile should be built");

        assert_eq!(profile.name, "auto-data-specialist-2");
        assert!(profile.capabilities.iter().any(|cap| cap == "relay"));
        assert!(profile.capabilities.iter().any(|cap| cap == "autochat"));
    }
}
1495}