1use std::io::{self, BufRead, IsTerminal, Write};
7use std::path::PathBuf;
8
9use anyhow::{Context, Result};
10
/// Crate version string, baked in at compile time from Cargo.toml.
const VERSION: &str = env!("CARGO_PKG_VERSION");
14
/// Whether ANSI color codes should be emitted.
///
/// All output in this module goes to stderr (`eprintln!`/`eprint!`), so the
/// terminal check must be on stderr, not stdout: the original stdout check
/// dropped colors from an interactive session when stdout was piped, and
/// wrote raw escape codes into a redirected stderr log when stdout was a tty.
/// Also honors the de-facto standard `NO_COLOR` opt-out (https://no-color.org).
fn use_color() -> bool {
    std::env::var_os("NO_COLOR").is_none() && io::stderr().is_terminal()
}
19
// ANSI SGR escape sequences used for styled terminal output.
// `RESET` restores default attributes after any styled span.
const RESET: &str = "\x1b[0m";
const BOLD: &str = "\x1b[1m";
const DIM: &str = "\x1b[2m";
const BRIGHT_CYAN: &str = "\x1b[96m";
const CYAN: &str = "\x1b[36m";
const GREEN: &str = "\x1b[32m";
const BRIGHT_GREEN: &str = "\x1b[92m";
const RED: &str = "\x1b[31m";
const YELLOW: &str = "\x1b[33m";
31
32fn c(code: &str, text: &str) -> String {
33 if use_color() {
34 format!("{code}{text}{RESET}")
35 } else {
36 text.to_string()
37 }
38}
39
/// Shorthand for rendering `text` in bold (no-op when colors are disabled).
fn bold(text: &str) -> String {
    c(BOLD, text)
}
43
/// Shorthand for rendering `text` dimmed (no-op when colors are disabled).
fn dim(text: &str) -> String {
    c(DIM, text)
}
47
// The block-art startup banner, one `(color, row)` pair per line.
// The color is applied per-row by `print_banner` only when colors are enabled.
const BANNER_LINES: [(&str, &str); 6] = [
    (BRIGHT_CYAN, " ██╗ ██╗███████╗███████╗"),
    (CYAN, " ██║ ██║██╔════╝██╔════╝"),
    (GREEN, " ██║ ██║█████╗ █████╗ "),
    (BRIGHT_GREEN, " ██║ ██║██╔══╝ ██╔══╝ "),
    (GREEN, " ███████╗██║██║ ███████╗"),
    (CYAN, " ╚══════╝╚═╝╚═╝ ╚══════╝"),
];
58
59pub fn print_banner() {
60 eprintln!();
61 let colored = use_color();
62 for (color, line) in &BANNER_LINES {
63 if colored {
64 eprintln!("{color}{line}{RESET}");
65 } else {
66 eprintln!("{line}");
67 }
68 }
69 eprintln!();
70 eprintln!(" {}", dim("Agent Operating System"));
71 eprintln!(" {}", dim(&format!("v{VERSION}")));
72 eprintln!();
73}
74
75pub fn print_quick_help() {
78 print_banner();
79 eprintln!(
80 " {}",
81 dim("Run `life setup` to configure, or use a command below.")
82 );
83 eprintln!();
84 eprintln!(" {}", bold("Commands"));
85 eprintln!();
86 eprintln!(
87 " {} configure providers & keys",
88 c(CYAN, "life setup")
89 );
90 eprintln!(" {} deploy an agent to cloud", c(CYAN, "life deploy"));
91 eprintln!(" {} check deployed agents", c(CYAN, "life status"));
92 eprintln!(" {} list deployed agents", c(CYAN, "life list"));
93 eprintln!(" {} stream service logs", c(CYAN, "life logs"));
94 eprintln!(" {} scale agent services", c(CYAN, "life scale"));
95 eprintln!(" {} cost tracking", c(CYAN, "life cost"));
96 eprintln!(" {} manage relay daemon", c(CYAN, "life relay"));
97 eprintln!();
98 eprintln!(" {}", bold("Agent Runtime"));
99 eprintln!();
100 eprintln!(" {} interactive TUI chat", c(GREEN, "arcan chat"));
101 eprintln!(" {} REPL mode", c(GREEN, "arcan shell"));
102 eprintln!(" {} start daemon", c(GREEN, "arcan serve"));
103 eprintln!();
104 eprintln!(" {}", dim("https://docs.broomva.tech/docs/life"));
105 eprintln!();
106}
107
108fn print_system_info() {
111 let platform = format!("{} {}", os_name(), std::env::consts::ARCH);
112 let crate_count = 87;
113 let tool_count = 26;
114 let skill_count = 307;
115
116 let w = 47; let top = format!(" ┌{}┐", "─".repeat(w));
118 let bot = format!(" └{}┘", "─".repeat(w));
119
120 eprintln!("{top}");
121 info_row("version", VERSION, w);
122 info_row("platform", &platform, w);
123 info_row("crates", &format!("{crate_count}"), w);
124 info_row("tools", &format!("{tool_count}"), w);
125 info_row("skills", &format!("{skill_count}"), w);
126 eprintln!(" │{}│", " ".repeat(w));
127 eprintln!(
128 " │ {}{}│",
129 bold("Modules"),
130 " ".repeat(w - 2 - "Modules".len())
131 );
132 let modules = "arcan · lago · praxis · autonomic · haima";
133 eprintln!(
134 " │ {}{}│",
135 c(DIM, modules),
136 " ".repeat(w - 2 - modules.len())
137 );
138 let modules2 = "nous · anima · vigil · spaces · opsis";
139 eprintln!(
140 " │ {}{}│",
141 c(DIM, modules2),
142 " ".repeat(w - 2 - modules2.len())
143 );
144 eprintln!("{bot}");
145 eprintln!();
146}
147
/// Print one `label value` row inside the info box, padding so the right
/// border lands at `width` display columns.
fn info_row(label: &str, value: &str, width: usize) {
    // `{:<12}` pads by character count, so measure the rendered content in
    // chars as well — byte length (`str::len`) under-pads any non-ASCII
    // value and would shift the box's right border.
    let content = format!(" {:<12}{}", label, value);
    let pad = width.saturating_sub(content.chars().count());
    eprintln!(" │{}{}│", content, " ".repeat(pad));
}
153
/// Human-readable name of the host operating system family.
/// Resolved at compile time via `cfg!`; unrecognized targets get "Unknown".
fn os_name() -> &'static str {
    if cfg!(target_os = "windows") {
        return "Windows";
    }
    if cfg!(target_os = "linux") {
        return "Linux";
    }
    if cfg!(target_os = "macos") {
        return "macOS";
    }
    "Unknown"
}
165
166fn config_dir() -> PathBuf {
169 dirs::home_dir()
170 .expect("cannot determine home directory")
171 .join(".life")
172}
173
174fn config_path() -> PathBuf {
175 config_dir().join("config.toml")
176}
177
178fn config_exists() -> bool {
179 config_path().is_file()
180}
181
/// On-disk layout of `~/.life/config.toml`. Field order here is the
/// section order `toml::to_string_pretty` emits.
#[derive(serde::Serialize, serde::Deserialize, Default)]
struct LifeConfig {
    // `[provider]` — required section.
    provider: ProviderConfig,
    // `[consciousness]` — optional; falls back to its `Default` when absent.
    #[serde(default)]
    consciousness: ConsciousnessConfig,
    // `[arcan]` — optional; falls back to its `Default` when absent.
    #[serde(default)]
    arcan: ArcanConfig,
}
190
/// `[provider]` section: which LLM backend to use and how to reach it.
#[derive(serde::Serialize, serde::Deserialize, Default)]
struct ProviderConfig {
    // Provider identifier, e.g. "anthropic" (see `Provider::name`).
    name: String,
    // Omitted from the file entirely for key-less providers (ollama/mock).
    #[serde(skip_serializing_if = "Option::is_none")]
    api_key: Option<String>,
    model: String,
    // Only present for providers with a configurable endpoint (ollama/vercel).
    #[serde(skip_serializing_if = "Option::is_none")]
    base_url: Option<String>,
}
200
/// `[consciousness]` section: feature toggle.
#[derive(serde::Serialize, serde::Deserialize)]
struct ConsciousnessConfig {
    enabled: bool,
}

// Hand-written (not derived) so the feature defaults to *enabled*;
// `#[derive(Default)]` would default `enabled` to false.
impl Default for ConsciousnessConfig {
    fn default() -> Self {
        Self { enabled: true }
    }
}
211
/// `[arcan]` section: agent runtime daemon settings.
#[derive(serde::Serialize, serde::Deserialize)]
struct ArcanConfig {
    port: u16,
}

// Hand-written so the port defaults to 3000 rather than the derived 0.
impl Default for ArcanConfig {
    fn default() -> Self {
        Self { port: 3000 }
    }
}
222
223fn prompt(message: &str) -> Result<String> {
226 eprint!("{message}");
227 io::stderr().flush()?;
228 let mut buf = String::new();
229 io::stdin()
230 .lock()
231 .read_line(&mut buf)
232 .context("failed to read input")?;
233 Ok(buf.trim().to_string())
234}
235
236fn prompt_with_default(message: &str, default: &str) -> Result<String> {
237 let input = prompt(&format!("{message} {}: ", dim(&format!("[{default}]"))))?;
238 if input.is_empty() {
239 Ok(default.to_string())
240 } else {
241 Ok(input)
242 }
243}
244
245fn prompt_secret(message: &str) -> Result<String> {
246 eprint!("{message}");
247 io::stderr().flush()?;
248
249 #[cfg(unix)]
251 let stty_off = std::process::Command::new("stty")
252 .arg("-echo")
253 .stdin(std::process::Stdio::inherit())
254 .status()
255 .is_ok();
256
257 let mut buf = String::new();
258 let result = io::stdin().lock().read_line(&mut buf);
259
260 #[cfg(unix)]
262 if stty_off {
263 let _ = std::process::Command::new("stty")
264 .arg("echo")
265 .stdin(std::process::Stdio::inherit())
266 .status();
267 }
268
269 eprintln!(); result.context("failed to read input")?;
272 Ok(buf.trim().to_string())
273}
274
/// The LLM backends `life setup` can configure.
#[derive(Debug, Clone, Copy)]
enum Provider {
    Anthropic,
    OpenAi,
    Vercel,
    Ollama,
    Mock,
}

impl Provider {
    /// Identifier written to `config.toml` as `provider.name`.
    fn name(&self) -> &'static str {
        match self {
            Self::Anthropic => "anthropic",
            Self::OpenAi => "openai",
            Self::Vercel => "vercel",
            Self::Ollama => "ollama",
            Self::Mock => "mock",
        }
    }

    /// Hosted providers require an API key; local and mock backends do not.
    fn needs_api_key(&self) -> bool {
        match self {
            Self::Anthropic | Self::OpenAi | Self::Vercel => true,
            Self::Ollama | Self::Mock => false,
        }
    }

    /// `(model id, short description)` pairs offered during setup.
    /// The first entry is treated as the default by `select_model`.
    fn models(&self) -> &[(&str, &str)] {
        match self {
            Self::Anthropic => &[
                ("claude-sonnet-4-5-20250929", "recommended"),
                ("claude-haiku-4.5", "fast & cheap"),
                ("claude-opus-4-6", "most capable"),
            ],
            Self::OpenAi => &[
                ("gpt-4.1", "recommended"),
                ("gpt-4.1-mini", "fast & cheap"),
                ("gpt-4o", "multimodal"),
            ],
            Self::Vercel => &[
                ("anthropic/claude-sonnet-4-5", "recommended"),
                ("openai/gpt-4.1", "alternative"),
            ],
            Self::Ollama => &[("llama3.2", "recommended"), ("custom", "enter your own")],
            Self::Mock => &[("mock-provider", "testing")],
        }
    }

    /// Placeholder hint shown when prompting for the API key.
    /// Empty for providers that take no key.
    fn key_hint(&self) -> &'static str {
        match self {
            Self::Anthropic => "sk-ant-...",
            Self::OpenAi => "sk-...",
            Self::Vercel => "gateway key",
            Self::Ollama | Self::Mock => "",
        }
    }
}
331
332fn select_provider() -> Result<Provider> {
333 eprintln!(" {}", bold("Choose your LLM provider:"));
334 eprintln!();
335 eprintln!(
336 " {} Anthropic (Claude) {}",
337 c(BRIGHT_CYAN, "1"),
338 dim("— recommended")
339 );
340 eprintln!(" {} OpenAI (GPT)", c(BRIGHT_CYAN, "2"));
341 eprintln!(
342 " {} Vercel AI Gateway {}",
343 c(BRIGHT_CYAN, "3"),
344 dim("— multi-provider routing")
345 );
346 eprintln!(
347 " {} Ollama {}",
348 c(BRIGHT_CYAN, "4"),
349 dim("— local, no API key")
350 );
351 eprintln!(
352 " {} Mock {}",
353 c(BRIGHT_CYAN, "5"),
354 dim("— testing, no API key")
355 );
356 eprintln!();
357
358 loop {
359 let input = prompt_with_default(" >", "1")?;
360 match input.as_str() {
361 "1" => return Ok(Provider::Anthropic),
362 "2" => return Ok(Provider::OpenAi),
363 "3" => return Ok(Provider::Vercel),
364 "4" => return Ok(Provider::Ollama),
365 "5" => return Ok(Provider::Mock),
366 _ => {
367 eprintln!(" {} Enter a number 1-5.", c(YELLOW, "!"));
368 }
369 }
370 }
371}
372
373fn prompt_api_key(provider: &Provider) -> Result<Option<String>> {
374 if !provider.needs_api_key() {
375 return Ok(None);
376 }
377
378 eprintln!();
379 let hint = provider.key_hint();
380 loop {
381 let key = prompt_secret(&format!(
382 " Enter your {} API key ({hint}): ",
383 bold(provider.name())
384 ))?;
385 if key.is_empty() {
386 eprintln!(
387 " {} API key is required for {}.",
388 c(YELLOW, "!"),
389 provider.name()
390 );
391 continue;
392 }
393 let visible = if key.len() > 8 {
395 format!("{}...{}", &key[..4], &key[key.len() - 4..])
396 } else {
397 "****".to_string()
398 };
399 eprintln!(" {} Key: {}", c(GREEN, "ok"), dim(&visible));
400 return Ok(Some(key));
401 }
402}
403
404fn prompt_base_url(provider: &Provider) -> Result<Option<String>> {
405 match provider {
406 Provider::Ollama => {
407 eprintln!();
408 let url = prompt_with_default(" Ollama base URL", "http://localhost:11434")?;
409 Ok(Some(url))
410 }
411 Provider::Vercel => {
412 eprintln!();
413 let url = prompt_with_default(" Vercel AI Gateway URL", "https://gateway.vercel.ai")?;
414 Ok(Some(url))
415 }
416 _ => Ok(None),
417 }
418}
419
420fn select_model(provider: &Provider) -> Result<String> {
421 let models = provider.models();
422
423 if models.len() == 1 {
425 let m = models[0].0;
426 eprintln!();
427 eprintln!(" Model: {}", c(GREEN, m));
428 return Ok(m.to_string());
429 }
430
431 eprintln!();
432 eprintln!(" {}", bold("Choose a model:"));
433 eprintln!();
434 for (i, (name, desc)) in models.iter().enumerate() {
435 let num = format!("{}", i + 1);
436 let default_marker = if i == 0 { " (default)" } else { "" };
437 eprintln!(
438 " {} {:<36} {}{}",
439 c(BRIGHT_CYAN, &num),
440 name,
441 dim(desc),
442 dim(default_marker)
443 );
444 }
445 eprintln!();
446
447 loop {
448 let input = prompt_with_default(" >", "1")?;
449
450 if models.iter().any(|(m, _)| *m == input) {
452 return Ok(input);
453 }
454
455 if let Ok(n) = input.parse::<usize>() {
456 if n >= 1 && n <= models.len() {
457 let chosen = models[n - 1].0;
458 if chosen == "custom" {
460 eprintln!();
461 let custom = prompt(" Enter model name: ")?;
462 if custom.is_empty() {
463 eprintln!(" {} Model name cannot be empty.", c(YELLOW, "!"));
464 continue;
465 }
466 return Ok(custom);
467 }
468 return Ok(chosen.to_string());
469 }
470 }
471 eprintln!(" {} Enter a number 1-{}.", c(YELLOW, "!"), models.len());
472 }
473}
474
475fn save_config(
478 provider: &Provider,
479 api_key: &Option<String>,
480 model: &str,
481 base_url: &Option<String>,
482) -> Result<()> {
483 let dir = config_dir();
484 std::fs::create_dir_all(&dir).context("failed to create ~/.life directory")?;
485
486 let cfg = LifeConfig {
487 provider: ProviderConfig {
488 name: provider.name().to_string(),
489 api_key: api_key.clone(),
490 model: model.to_string(),
491 base_url: base_url.clone(),
492 },
493 consciousness: ConsciousnessConfig::default(),
494 arcan: ArcanConfig::default(),
495 };
496
497 let content = toml::to_string_pretty(&cfg).context("failed to serialize config")?;
498 let path = config_path();
499 std::fs::write(&path, &content)
500 .with_context(|| format!("failed to write {}", path.display()))?;
501
502 #[cfg(unix)]
504 {
505 use std::os::unix::fs::PermissionsExt;
506 std::fs::set_permissions(&path, std::fs::Permissions::from_mode(0o600))
507 .context("failed to set config permissions")?;
508 }
509
510 eprintln!();
511 eprintln!(
512 " {} Config saved to {}",
513 c(GREEN, "ok"),
514 dim(&path.display().to_string())
515 );
516
517 Ok(())
518}
519
520async fn test_connection(
523 provider: &Provider,
524 api_key: &Option<String>,
525 model: &str,
526 base_url: &Option<String>,
527) -> Result<bool> {
528 eprintln!();
529 eprint!(" {} Testing connection...", c(CYAN, "◎"));
530 io::stderr().flush()?;
531
532 let client = reqwest::Client::builder()
533 .timeout(std::time::Duration::from_secs(15))
534 .build()?;
535
536 let result = match provider {
537 Provider::Anthropic => {
538 let key = api_key.as_deref().unwrap_or("");
539 client
540 .post("https://api.anthropic.com/v1/messages")
541 .header("x-api-key", key)
542 .header("anthropic-version", "2023-06-01")
543 .header("content-type", "application/json")
544 .json(&serde_json::json!({
545 "model": model,
546 "max_tokens": 1,
547 "messages": [{"role": "user", "content": "ping"}]
548 }))
549 .send()
550 .await
551 }
552 Provider::OpenAi => {
553 let key = api_key.as_deref().unwrap_or("");
554 client
555 .post("https://api.openai.com/v1/chat/completions")
556 .header("Authorization", format!("Bearer {key}"))
557 .header("content-type", "application/json")
558 .json(&serde_json::json!({
559 "model": model,
560 "max_tokens": 1,
561 "messages": [{"role": "user", "content": "ping"}]
562 }))
563 .send()
564 .await
565 }
566 Provider::Vercel => {
567 let url = base_url.as_deref().unwrap_or("https://gateway.vercel.ai");
568 let key = api_key.as_deref().unwrap_or("");
569 client
570 .post(format!("{url}/v1/chat/completions"))
571 .header("Authorization", format!("Bearer {key}"))
572 .header("content-type", "application/json")
573 .json(&serde_json::json!({
574 "model": model,
575 "max_tokens": 1,
576 "messages": [{"role": "user", "content": "ping"}]
577 }))
578 .send()
579 .await
580 }
581 Provider::Ollama => {
582 let url = base_url.as_deref().unwrap_or("http://localhost:11434");
583 client.get(format!("{url}/api/tags")).send().await
584 }
585 Provider::Mock => {
586 eprint!("\r");
588 eprintln!(
589 " {} Connected to {} ({})",
590 c(GREEN, "✓"),
591 bold("mock"),
592 model
593 );
594 return Ok(true);
595 }
596 };
597
598 match result {
599 Ok(resp) if resp.status().is_success() || resp.status().as_u16() == 200 => {
600 eprint!("\r");
601 eprintln!(
602 " {} Connected to {} ({})",
603 c(GREEN, "✓"),
604 bold(provider.name()),
605 model
606 );
607 Ok(true)
608 }
609 Ok(resp) => {
610 let status = resp.status();
611 let body = resp.text().await.unwrap_or_default();
612
613 let msg = serde_json::from_str::<serde_json::Value>(&body)
615 .ok()
616 .and_then(|v| {
617 v.get("error").and_then(|e| {
618 e.get("message")
619 .or(Some(e))
620 .and_then(|m| m.as_str().map(String::from))
621 })
622 })
623 .unwrap_or_else(|| format!("HTTP {status}"));
624
625 eprint!("\r");
626 eprintln!(" {} Connection failed: {}", c(RED, "✗"), msg);
627 eprintln!(" {}", dim("Run `life setup` to reconfigure."));
628 Ok(false)
629 }
630 Err(e) => {
631 eprint!("\r");
632 eprintln!(" {} Connection failed: {e}", c(RED, "✗"));
633 eprintln!(" {}", dim("Run `life setup` to reconfigure."));
634 Ok(false)
635 }
636 }
637}
638
639fn print_success(provider: &Provider, api_key: &Option<String>) {
642 eprintln!();
643 eprintln!(" {}", c(GREEN, "✓ Setup complete!"));
644 eprintln!();
645 eprintln!(" {}", bold("Quick start"));
646 eprintln!();
647 eprintln!(" {} reconfigure", c(CYAN, "life setup"));
648 eprintln!(" {} interactive TUI chat", c(CYAN, "arcan chat"));
649 eprintln!(" {} REPL mode", c(CYAN, "arcan shell"));
650 eprintln!(" {} start daemon", c(CYAN, "arcan serve"));
651 eprintln!(" {} deploy to cloud", c(CYAN, "life deploy"));
652 eprintln!(" {} check deployments", c(CYAN, "life status"));
653 eprintln!();
654
655 if let Some(key) = api_key {
657 let env_var = match provider {
658 Provider::Anthropic => "ANTHROPIC_API_KEY",
659 Provider::OpenAi => "OPENAI_API_KEY",
660 Provider::Vercel => "VERCEL_AI_GATEWAY_KEY",
661 _ => "",
662 };
663 if !env_var.is_empty() {
664 let visible = if key.len() > 12 {
665 format!("{}...{}", &key[..8], &key[key.len() - 4..])
666 } else {
667 "****".to_string()
668 };
669 eprintln!(" {}", bold("Environment"));
670 eprintln!();
671 eprintln!(" export {env_var}={visible}");
672 eprintln!();
673 }
674 }
675
676 eprintln!(" Or run directly:");
677 eprintln!();
678 eprintln!(" {}", c(GREEN, "arcan chat"));
679 eprintln!();
680}
681
682pub async fn run() -> Result<()> {
685 print_banner();
686 print_system_info();
687
688 if config_exists() {
690 let answer = prompt(&format!(
691 " Existing config found at {}. Reconfigure? {}: ",
692 dim(&config_path().display().to_string()),
693 dim("[y/N]")
694 ))?;
695 if !matches!(answer.to_lowercase().as_str(), "y" | "yes") {
696 eprintln!();
697 eprintln!(" {} Keeping existing configuration.", c(GREEN, "ok"));
698 eprintln!(" Run {} to start.", c(CYAN, "arcan chat"));
699 eprintln!();
700 return Ok(());
701 }
702 eprintln!();
703 }
704
705 let provider = select_provider()?;
707
708 let api_key = prompt_api_key(&provider)?;
710
711 let base_url = prompt_base_url(&provider)?;
713
714 let model = select_model(&provider)?;
716
717 save_config(&provider, &api_key, &model, &base_url)?;
719
720 let _ok = test_connection(&provider, &api_key, &model, &base_url).await?;
722
723 print_success(&provider, &api_key);
725
726 Ok(())
727}