use anyhow::{Context, Result};
use perspt_core::GenAIProvider;
/// Entry point for interactive chat mode.
///
/// Picks a provider/model pair from the environment, constructs the
/// LLM client, then hands control to the terminal UI until it exits.
///
/// # Errors
/// Returns an error if the provider cannot be constructed (e.g. no
/// usable API key) or if the TUI loop fails.
pub async fn run() -> Result<()> {
    log::info!("Starting chat mode...");

    // Provider selection is driven purely by which API keys are set.
    let (provider_id, model) = detect_provider_from_env();
    log::info!("Using provider: {}, model: {}", provider_id, model);

    let llm = GenAIProvider::new_with_config(Some(provider_id), None)
        .context("Failed to create LLM provider. Ensure an API key is set.")?;

    // The TUI owns the provider for the remainder of the session.
    perspt_tui::run_chat_tui(llm, model.to_string()).await?;
    Ok(())
}
/// Chooses a provider and default model based on which API key
/// environment variables are present.
///
/// Providers are checked in a fixed priority order (Gemini first);
/// the first variable that is set wins. When none of the known keys
/// exist, falls back to a local Ollama instance.
fn detect_provider_from_env() -> (&'static str, &'static str) {
    // Priority-ordered table: (env var, provider id, default model).
    const PROVIDERS: [(&str, &str, &str); 7] = [
        ("GEMINI_API_KEY", "gemini", "gemini-3.1-flash-lite-preview"),
        ("OPENAI_API_KEY", "openai", "gpt-4o-mini"),
        ("ANTHROPIC_API_KEY", "anthropic", "claude-3-5-sonnet-20241022"),
        ("GROQ_API_KEY", "groq", "llama-3.1-8b-instant"),
        ("COHERE_API_KEY", "cohere", "command-r-plus"),
        ("XAI_API_KEY", "xai", "grok-beta"),
        ("DEEPSEEK_API_KEY", "deepseek", "deepseek-chat"),
    ];

    PROVIDERS
        .iter()
        .find(|(key, _, _)| std::env::var(key).is_ok())
        .map(|&(_, provider, model)| (provider, model))
        // No cloud key configured: assume a local Ollama server.
        .unwrap_or(("ollama", "llama3.2"))
}