1use std::io::{self, BufRead, IsTerminal, Write};
7use std::path::PathBuf;
8
9use anyhow::{Context, Result};
10
/// Crate version embedded at compile time; shown in the banner and info box.
const VERSION: &str = env!("CARGO_PKG_VERSION");
14
/// Whether ANSI color codes should be emitted.
///
/// Every print in this module goes to stderr, so terminal detection is done
/// on stderr (checking stdout, as before, mis-colors when exactly one of the
/// two streams is redirected). The conventional `NO_COLOR` environment
/// variable is honored as an explicit opt-out.
fn use_color() -> bool {
    std::env::var_os("NO_COLOR").is_none() && io::stderr().is_terminal()
}
19
// ANSI SGR escape sequences for terminal styling; only emitted when
// `use_color()` reports a color-capable terminal.
const RESET: &str = "\x1b[0m"; // restore default attributes
const BOLD: &str = "\x1b[1m";
const DIM: &str = "\x1b[2m";
const BRIGHT_CYAN: &str = "\x1b[96m";
const CYAN: &str = "\x1b[36m";
const GREEN: &str = "\x1b[32m";
const BRIGHT_GREEN: &str = "\x1b[92m";
const RED: &str = "\x1b[31m";
const YELLOW: &str = "\x1b[33m";
31
32fn c(code: &str, text: &str) -> String {
33 if use_color() {
34 format!("{code}{text}{RESET}")
35 } else {
36 text.to_string()
37 }
38}
39
/// Render `text` in bold (a no-op when color output is disabled).
fn bold(text: &str) -> String {
    c(BOLD, text)
}
43
/// Render `text` dimmed (a no-op when color output is disabled).
fn dim(text: &str) -> String {
    c(DIM, text)
}
47
// ASCII-art "LIFE" banner, one (color, row) pair per line so each row can be
// tinted individually by `print_banner`.
const BANNER_LINES: [(&str, &str); 6] = [
    (BRIGHT_CYAN, " ██╗ ██╗███████╗███████╗"),
    (CYAN, " ██║ ██║██╔════╝██╔════╝"),
    (GREEN, " ██║ ██║█████╗ █████╗ "),
    (BRIGHT_GREEN, " ██║ ██║██╔══╝ ██╔══╝ "),
    (GREEN, " ███████╗██║██║ ███████╗"),
    (CYAN, " ╚══════╝╚═╝╚═╝ ╚══════╝"),
];
58
59pub fn print_banner() {
60 eprintln!();
61 let colored = use_color();
62 for (color, line) in &BANNER_LINES {
63 if colored {
64 eprintln!("{color}{line}{RESET}");
65 } else {
66 eprintln!("{line}");
67 }
68 }
69 eprintln!();
70 eprintln!(" {}", dim("Agent Operating System"));
71 eprintln!(" {}", dim(&format!("v{VERSION}")));
72 eprintln!();
73}
74
75pub fn print_quick_help() {
78 print_banner();
79 eprintln!(
80 " {}",
81 dim("Run `life setup` to configure, or use a command below.")
82 );
83 eprintln!();
84 eprintln!(" {}", bold("Commands"));
85 eprintln!();
86 eprintln!(
87 " {} configure providers & keys",
88 c(CYAN, "life setup")
89 );
90 eprintln!(" {} deploy an agent to cloud", c(CYAN, "life deploy"));
91 eprintln!(" {} check deployed agents", c(CYAN, "life status"));
92 eprintln!(" {} list deployed agents", c(CYAN, "life list"));
93 eprintln!(" {} stream service logs", c(CYAN, "life logs"));
94 eprintln!(" {} scale agent services", c(CYAN, "life scale"));
95 eprintln!(" {} cost tracking", c(CYAN, "life cost"));
96 eprintln!(" {} manage relay daemon", c(CYAN, "life relay"));
97 eprintln!();
98 eprintln!(" {}", bold("Agent Runtime"));
99 eprintln!();
100 eprintln!(" {} interactive TUI chat", c(GREEN, "arcan chat"));
101 eprintln!(" {} REPL mode", c(GREEN, "arcan shell"));
102 eprintln!(" {} start daemon", c(GREEN, "arcan serve"));
103 eprintln!();
104 eprintln!(" {}", dim("https://docs.broomva.tech/docs/life"));
105 eprintln!();
106}
107
108fn print_system_info() {
111 let platform = format!("{} {}", os_name(), std::env::consts::ARCH);
112 let crate_count = 87;
113 let tool_count = 26;
114 let skill_count = 307;
115
116 let w = 47; let top = format!(" ┌{}┐", "─".repeat(w));
118 let bot = format!(" └{}┘", "─".repeat(w));
119
120 eprintln!("{top}");
121 info_row("version", VERSION, w);
122 info_row("platform", &platform, w);
123 info_row("crates", &format!("{crate_count}"), w);
124 info_row("tools", &format!("{tool_count}"), w);
125 info_row("skills", &format!("{skill_count}"), w);
126 eprintln!(" │{}│", " ".repeat(w));
127 eprintln!(
128 " │ {}{}│",
129 bold("Modules"),
130 " ".repeat(w - 2 - "Modules".len())
131 );
132 let modules = "arcan · lago · praxis · autonomic · haima";
133 eprintln!(
134 " │ {}{}│",
135 c(DIM, modules),
136 " ".repeat(w - 2 - modules.len())
137 );
138 let modules2 = "nous · anima · vigil · spaces · opsis";
139 eprintln!(
140 " │ {}{}│",
141 c(DIM, modules2),
142 " ".repeat(w - 2 - modules2.len())
143 );
144 eprintln!("{bot}");
145 eprintln!();
146}
147
/// Print one `label value` row inside the info box, right-padded so the box
/// border lands at `width` columns. Padding is computed from the character
/// count (not byte length) so a non-ASCII value doesn't shift the border.
fn info_row(label: &str, value: &str, width: usize) {
    let content = format!(" {:<12}{}", label, value);
    let pad = width.saturating_sub(content.chars().count());
    eprintln!(" │{}{}│", content, " ".repeat(pad));
}
153
/// Human-readable name of the OS this binary was compiled for.
fn os_name() -> &'static str {
    if cfg!(target_os = "macos") {
        return "macOS";
    }
    if cfg!(target_os = "linux") {
        return "Linux";
    }
    if cfg!(target_os = "windows") {
        return "Windows";
    }
    "Unknown"
}
165
166fn config_dir() -> PathBuf {
169 dirs::home_dir()
170 .expect("cannot determine home directory")
171 .join(".life")
172}
173
174fn config_path() -> PathBuf {
175 config_dir().join("config.toml")
176}
177
178fn config_exists() -> bool {
179 config_path().is_file()
180}
181
/// Root schema of `~/.life/config.toml`.
#[derive(serde::Serialize, serde::Deserialize, Default)]
struct LifeConfig {
    /// Active LLM provider and model selection.
    provider: ProviderConfig,
    /// Consciousness subsystem settings; defaults applied when absent.
    #[serde(default)]
    consciousness: ConsciousnessConfig,
    /// Arcan daemon settings; defaults applied when absent.
    #[serde(default)]
    arcan: ArcanConfig,
}
190
/// Provider selection as persisted in the config file.
#[derive(serde::Serialize, serde::Deserialize, Default)]
struct ProviderConfig {
    /// Provider identifier (e.g. "anthropic", "ollama") — see `Provider::name`.
    name: String,
    /// Model id to use with that provider.
    model: String,
    /// Custom endpoint (Ollama/Vercel); omitted from the TOML when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    base_url: Option<String>,
}
198
/// Consciousness subsystem toggle.
#[derive(serde::Serialize, serde::Deserialize)]
struct ConsciousnessConfig {
    enabled: bool,
}

impl Default for ConsciousnessConfig {
    /// Enabled by default when the section is missing from the config.
    fn default() -> Self {
        Self { enabled: true }
    }
}
209
/// Arcan daemon settings.
#[derive(serde::Serialize, serde::Deserialize)]
struct ArcanConfig {
    /// TCP port the daemon listens on.
    port: u16,
}

impl Default for ArcanConfig {
    /// Default daemon port (3000) when the section is missing from the config.
    fn default() -> Self {
        Self { port: 3000 }
    }
}
220
221fn prompt(message: &str) -> Result<String> {
224 eprint!("{message}");
225 io::stderr().flush()?;
226 let mut buf = String::new();
227 io::stdin()
228 .lock()
229 .read_line(&mut buf)
230 .context("failed to read input")?;
231 Ok(buf.trim().to_string())
232}
233
234fn prompt_with_default(message: &str, default: &str) -> Result<String> {
235 let input = prompt(&format!("{message} {}: ", dim(&format!("[{default}]"))))?;
236 if input.is_empty() {
237 Ok(default.to_string())
238 } else {
239 Ok(input)
240 }
241}
242
243fn prompt_secret(message: &str) -> Result<String> {
244 eprint!("{message}");
245 io::stderr().flush()?;
246
247 #[cfg(unix)]
249 let stty_off = std::process::Command::new("stty")
250 .arg("-echo")
251 .stdin(std::process::Stdio::inherit())
252 .status()
253 .is_ok();
254
255 let mut buf = String::new();
256 let result = io::stdin().lock().read_line(&mut buf);
257
258 #[cfg(unix)]
260 if stty_off {
261 let _ = std::process::Command::new("stty")
262 .arg("echo")
263 .stdin(std::process::Stdio::inherit())
264 .status();
265 }
266
267 eprintln!(); result.context("failed to read input")?;
270 Ok(buf.trim().to_string())
271}
272
/// LLM backends offered during interactive setup.
#[derive(Debug, Clone, Copy)]
enum Provider {
    Anthropic,
    OpenAi,
    /// Vercel AI Gateway — routes to multiple upstream providers.
    Vercel,
    /// Local Ollama server; no API key required.
    Ollama,
    /// Offline stub for testing; no network or key required.
    Mock,
}
283
284impl Provider {
285 fn name(&self) -> &'static str {
286 match self {
287 Self::Anthropic => "anthropic",
288 Self::OpenAi => "openai",
289 Self::Vercel => "vercel",
290 Self::Ollama => "ollama",
291 Self::Mock => "mock",
292 }
293 }
294
295 fn needs_api_key(&self) -> bool {
296 matches!(self, Self::Anthropic | Self::OpenAi | Self::Vercel)
297 }
298
299 fn models(&self) -> &[(&str, &str)] {
300 match self {
301 Self::Anthropic => &[
302 ("claude-sonnet-4-5-20250929", "recommended"),
303 ("claude-haiku-4.5", "fast & cheap"),
304 ("claude-opus-4-6", "most capable"),
305 ],
306 Self::OpenAi => &[
307 ("gpt-4.1", "recommended"),
308 ("gpt-4.1-mini", "fast & cheap"),
309 ("gpt-4o", "multimodal"),
310 ],
311 Self::Vercel => &[
312 ("anthropic/claude-sonnet-4-5", "recommended"),
313 ("openai/gpt-4.1", "alternative"),
314 ],
315 Self::Ollama => &[("llama3.2", "recommended"), ("custom", "enter your own")],
316 Self::Mock => &[("mock-provider", "testing")],
317 }
318 }
319
320 fn key_hint(&self) -> &'static str {
321 match self {
322 Self::Anthropic => "sk-ant-...",
323 Self::OpenAi => "sk-...",
324 Self::Vercel => "gateway key",
325 _ => "",
326 }
327 }
328}
329
330fn select_provider() -> Result<Provider> {
331 eprintln!(" {}", bold("Choose your LLM provider:"));
332 eprintln!();
333 eprintln!(
334 " {} Anthropic (Claude) {}",
335 c(BRIGHT_CYAN, "1"),
336 dim("— recommended")
337 );
338 eprintln!(" {} OpenAI (GPT)", c(BRIGHT_CYAN, "2"));
339 eprintln!(
340 " {} Vercel AI Gateway {}",
341 c(BRIGHT_CYAN, "3"),
342 dim("— multi-provider routing")
343 );
344 eprintln!(
345 " {} Ollama {}",
346 c(BRIGHT_CYAN, "4"),
347 dim("— local, no API key")
348 );
349 eprintln!(
350 " {} Mock {}",
351 c(BRIGHT_CYAN, "5"),
352 dim("— testing, no API key")
353 );
354 eprintln!();
355
356 loop {
357 let input = prompt_with_default(" >", "1")?;
358 match input.as_str() {
359 "1" => return Ok(Provider::Anthropic),
360 "2" => return Ok(Provider::OpenAi),
361 "3" => return Ok(Provider::Vercel),
362 "4" => return Ok(Provider::Ollama),
363 "5" => return Ok(Provider::Mock),
364 _ => {
365 eprintln!(" {} Enter a number 1-5.", c(YELLOW, "!"));
366 }
367 }
368 }
369}
370
371fn prompt_api_key(provider: &Provider) -> Result<Option<String>> {
372 if !provider.needs_api_key() {
373 return Ok(None);
374 }
375
376 eprintln!();
377 let hint = provider.key_hint();
378 loop {
379 let key = prompt_secret(&format!(
380 " Enter your {} API key ({hint}): ",
381 bold(provider.name())
382 ))?;
383 if key.is_empty() {
384 eprintln!(
385 " {} API key is required for {}.",
386 c(YELLOW, "!"),
387 provider.name()
388 );
389 continue;
390 }
391 let visible = if key.len() > 8 {
393 format!("{}...{}", &key[..4], &key[key.len() - 4..])
394 } else {
395 "****".to_string()
396 };
397 eprintln!(" {} Key: {}", c(GREEN, "ok"), dim(&visible));
398 return Ok(Some(key));
399 }
400}
401
402fn prompt_base_url(provider: &Provider) -> Result<Option<String>> {
403 match provider {
404 Provider::Ollama => {
405 eprintln!();
406 let url = prompt_with_default(" Ollama base URL", "http://localhost:11434")?;
407 Ok(Some(url))
408 }
409 Provider::Vercel => {
410 eprintln!();
411 let url = prompt_with_default(" Vercel AI Gateway URL", "https://gateway.vercel.ai")?;
412 Ok(Some(url))
413 }
414 _ => Ok(None),
415 }
416}
417
418fn select_model(provider: &Provider) -> Result<String> {
419 let models = provider.models();
420
421 if models.len() == 1 {
423 let m = models[0].0;
424 eprintln!();
425 eprintln!(" Model: {}", c(GREEN, m));
426 return Ok(m.to_string());
427 }
428
429 eprintln!();
430 eprintln!(" {}", bold("Choose a model:"));
431 eprintln!();
432 for (i, (name, desc)) in models.iter().enumerate() {
433 let num = format!("{}", i + 1);
434 let default_marker = if i == 0 { " (default)" } else { "" };
435 eprintln!(
436 " {} {:<36} {}{}",
437 c(BRIGHT_CYAN, &num),
438 name,
439 dim(desc),
440 dim(default_marker)
441 );
442 }
443 eprintln!();
444
445 loop {
446 let input = prompt_with_default(" >", "1")?;
447
448 if models.iter().any(|(m, _)| *m == input) {
450 return Ok(input);
451 }
452
453 if let Ok(n) = input.parse::<usize>() {
454 if n >= 1 && n <= models.len() {
455 let chosen = models[n - 1].0;
456 if chosen == "custom" {
458 eprintln!();
459 let custom = prompt(" Enter model name: ")?;
460 if custom.is_empty() {
461 eprintln!(" {} Model name cannot be empty.", c(YELLOW, "!"));
462 continue;
463 }
464 return Ok(custom);
465 }
466 return Ok(chosen.to_string());
467 }
468 }
469 eprintln!(" {} Enter a number 1-{}.", c(YELLOW, "!"), models.len());
470 }
471}
472
473fn save_config(
476 provider: &Provider,
477 api_key: &Option<String>,
478 model: &str,
479 base_url: &Option<String>,
480) -> Result<()> {
481 let dir = config_dir();
482 std::fs::create_dir_all(&dir).context("failed to create ~/.life directory")?;
483
484 let cfg = LifeConfig {
485 provider: ProviderConfig {
486 name: provider.name().to_string(),
487 model: model.to_string(),
488 base_url: base_url.clone(),
489 },
490 consciousness: ConsciousnessConfig::default(),
491 arcan: ArcanConfig::default(),
492 };
493
494 let content = toml::to_string_pretty(&cfg).context("failed to serialize config")?;
495 let path = config_path();
496 std::fs::write(&path, &content)
497 .with_context(|| format!("failed to write {}", path.display()))?;
498
499 eprintln!();
500 eprintln!(
501 " {} Config saved to {}",
502 c(GREEN, "ok"),
503 dim(&path.display().to_string())
504 );
505
506 if let Some(key) = api_key {
508 let (env_var, kc_account) =
509 life_paths::credentials::provider_credential_names(provider.name());
510 let source = life_paths::credentials::store_credential(env_var, kc_account, key);
511 eprintln!(
512 " {} API key stored in {}",
513 c(GREEN, "ok"),
514 dim(&source.to_string())
515 );
516 }
517
518 Ok(())
519}
520
521async fn test_connection(
524 provider: &Provider,
525 api_key: &Option<String>,
526 model: &str,
527 base_url: &Option<String>,
528) -> Result<bool> {
529 eprintln!();
530 eprint!(" {} Testing connection...", c(CYAN, "◎"));
531 io::stderr().flush()?;
532
533 let client = reqwest::Client::builder()
534 .timeout(std::time::Duration::from_secs(15))
535 .build()?;
536
537 let result = match provider {
538 Provider::Anthropic => {
539 let key = api_key.as_deref().unwrap_or("");
540 client
541 .post("https://api.anthropic.com/v1/messages")
542 .header("x-api-key", key)
543 .header("anthropic-version", "2023-06-01")
544 .header("content-type", "application/json")
545 .json(&serde_json::json!({
546 "model": model,
547 "max_tokens": 1,
548 "messages": [{"role": "user", "content": "ping"}]
549 }))
550 .send()
551 .await
552 }
553 Provider::OpenAi => {
554 let key = api_key.as_deref().unwrap_or("");
555 client
556 .post("https://api.openai.com/v1/chat/completions")
557 .header("Authorization", format!("Bearer {key}"))
558 .header("content-type", "application/json")
559 .json(&serde_json::json!({
560 "model": model,
561 "max_tokens": 1,
562 "messages": [{"role": "user", "content": "ping"}]
563 }))
564 .send()
565 .await
566 }
567 Provider::Vercel => {
568 let url = base_url.as_deref().unwrap_or("https://gateway.vercel.ai");
569 let key = api_key.as_deref().unwrap_or("");
570 client
571 .post(format!("{url}/v1/chat/completions"))
572 .header("Authorization", format!("Bearer {key}"))
573 .header("content-type", "application/json")
574 .json(&serde_json::json!({
575 "model": model,
576 "max_tokens": 1,
577 "messages": [{"role": "user", "content": "ping"}]
578 }))
579 .send()
580 .await
581 }
582 Provider::Ollama => {
583 let url = base_url.as_deref().unwrap_or("http://localhost:11434");
584 client.get(format!("{url}/api/tags")).send().await
585 }
586 Provider::Mock => {
587 eprint!("\r");
589 eprintln!(
590 " {} Connected to {} ({})",
591 c(GREEN, "✓"),
592 bold("mock"),
593 model
594 );
595 return Ok(true);
596 }
597 };
598
599 match result {
600 Ok(resp) if resp.status().is_success() || resp.status().as_u16() == 200 => {
601 eprint!("\r");
602 eprintln!(
603 " {} Connected to {} ({})",
604 c(GREEN, "✓"),
605 bold(provider.name()),
606 model
607 );
608 Ok(true)
609 }
610 Ok(resp) => {
611 let status = resp.status();
612 let body = resp.text().await.unwrap_or_default();
613
614 let msg = serde_json::from_str::<serde_json::Value>(&body)
616 .ok()
617 .and_then(|v| {
618 v.get("error").and_then(|e| {
619 e.get("message")
620 .or(Some(e))
621 .and_then(|m| m.as_str().map(String::from))
622 })
623 })
624 .unwrap_or_else(|| format!("HTTP {status}"));
625
626 eprint!("\r");
627 eprintln!(" {} Connection failed: {}", c(RED, "✗"), msg);
628 eprintln!(" {}", dim("Run `life setup` to reconfigure."));
629 Ok(false)
630 }
631 Err(e) => {
632 eprint!("\r");
633 eprintln!(" {} Connection failed: {e}", c(RED, "✗"));
634 eprintln!(" {}", dim("Run `life setup` to reconfigure."));
635 Ok(false)
636 }
637 }
638}
639
640fn print_success(provider: &Provider, api_key: &Option<String>) {
643 eprintln!();
644 eprintln!(" {}", c(GREEN, "✓ Setup complete!"));
645 eprintln!();
646 eprintln!(" {}", bold("Quick start"));
647 eprintln!();
648 eprintln!(" {} reconfigure", c(CYAN, "life setup"));
649 eprintln!(" {} interactive TUI chat", c(CYAN, "arcan chat"));
650 eprintln!(" {} REPL mode", c(CYAN, "arcan shell"));
651 eprintln!(" {} start daemon", c(CYAN, "arcan serve"));
652 eprintln!(" {} deploy to cloud", c(CYAN, "life deploy"));
653 eprintln!(" {} check deployments", c(CYAN, "life status"));
654 eprintln!();
655
656 if api_key.is_some() && provider.needs_api_key() {
658 let (env_var, kc_account) =
659 life_paths::credentials::provider_credential_names(provider.name());
660 let storage_hint = if life_paths::keychain::is_available() {
661 format!("keychain (account: {kc_account})")
662 } else {
663 "~/.life/credentials/.env".to_string()
664 };
665 eprintln!(" {}", bold("Credentials"));
666 eprintln!();
667 eprintln!(" {env_var} stored in {}", dim(&storage_hint),);
668 eprintln!();
669 }
670
671 eprintln!(" Or run directly:");
672 eprintln!();
673 eprintln!(" {}", c(GREEN, "arcan chat"));
674 eprintln!();
675}
676
677pub async fn run() -> Result<()> {
680 print_banner();
681 print_system_info();
682
683 if config_exists() {
685 let answer = prompt(&format!(
686 " Existing config found at {}. Reconfigure? {}: ",
687 dim(&config_path().display().to_string()),
688 dim("[y/N]")
689 ))?;
690 if !matches!(answer.to_lowercase().as_str(), "y" | "yes") {
691 eprintln!();
692 eprintln!(" {} Keeping existing configuration.", c(GREEN, "ok"));
693 eprintln!(" Run {} to start.", c(CYAN, "arcan chat"));
694 eprintln!();
695 return Ok(());
696 }
697 eprintln!();
698 }
699
700 let provider = select_provider()?;
702
703 let api_key = prompt_api_key(&provider)?;
705
706 let base_url = prompt_base_url(&provider)?;
708
709 let model = select_model(&provider)?;
711
712 save_config(&provider, &api_key, &model, &base_url)?;
714
715 let _ok = test_connection(&provider, &api_key, &model, &base_url).await?;
717
718 print_success(&provider, &api_key);
720
721 Ok(())
722}