use crate::index::embedding::{EmbeddingProvider, MockEmbeddingProvider};
use crate::llm::anthropic::AnthropicProvider;
use crate::llm::mistral::{MistralNativeEmbeddingProvider, MistralNativeProvider};
use crate::llm::openai::{OpenAiEmbeddingProvider, OpenAiProvider};
use crate::llm::vertex::{
MistralLlmProvider, VertexAiConfig, VertexAiEmbeddingProvider, VertexAiLlmProvider,
};
use crate::llm::{LlmProvider, MockLlmProvider};
/// Returns true when the model name belongs to the Mistral family,
/// i.e. it contains "mistral" or "codestral" in any casing.
fn is_mistral_model(model: &str) -> bool {
    let needle = model.to_lowercase();
    ["mistral", "codestral"].iter().any(|m| needle.contains(m))
}
/// Selects and constructs an LLM provider, returning it with a short
/// static label describing which backend was chosen.
///
/// `NOETHER_LLM_PROVIDER` forces a specific backend ("mock", "mistral",
/// "openai", "anthropic", "vertex", or one of the CLI slugs). When unset,
/// unknown, or when the forced backend (other than "vertex") is
/// unavailable, providers are probed in preference order: Mistral native,
/// OpenAI, Anthropic, Vertex AI, any CLI binary on PATH, and finally mock.
pub fn build_llm_provider() -> (Box<dyn LlmProvider>, &'static str) {
    let requested = std::env::var("NOETHER_LLM_PROVIDER").unwrap_or_default();
    // Shared constructor for the mock fallback.
    let mock = || -> (Box<dyn LlmProvider>, &'static str) {
        (Box::new(MockLlmProvider::new("{}")), "mock")
    };
    match requested.as_str() {
        "mock" => return mock(),
        "mistral" => match build_mistral_native_llm() {
            Ok(provider) => return (provider, "mistral-native"),
            Err(e) => eprintln!("Warning: Mistral native LLM unavailable: {e}. Falling back."),
        },
        "openai" => match build_openai_llm() {
            Ok(provider) => return (provider, "openai"),
            Err(e) => eprintln!("Warning: OpenAI LLM unavailable: {e}. Falling back."),
        },
        "anthropic" => match build_anthropic_llm() {
            Ok(provider) => return (provider, "anthropic"),
            Err(e) => eprintln!("Warning: Anthropic LLM unavailable: {e}. Falling back."),
        },
        "vertex" => match build_vertex_or_mistral_llm() {
            Ok((provider, label)) => return (provider, label),
            Err(e) => {
                // An explicit vertex request that fails drops straight to
                // mock instead of probing the other backends.
                eprintln!("Warning: Vertex AI LLM unavailable: {e}. Falling back to mock.");
                return mock();
            }
        },
        cli @ ("claude-cli" | "gemini-cli" | "cursor-cli" | "opencode") => {
            use crate::llm::cli_provider::{specs, CliProvider};
            let spec = match cli {
                "claude-cli" => specs::CLAUDE,
                "gemini-cli" => specs::GEMINI,
                "cursor-cli" => specs::CURSOR,
                _ => specs::OPENCODE,
            };
            let provider = CliProvider::new(spec);
            if provider.available() {
                return (Box::new(provider), spec.provider_slug);
            }
            eprintln!(
                "Warning: NOETHER_LLM_PROVIDER={} but `{}` binary not found on PATH (or NOETHER_LLM_SKIP_CLI is set).",
                requested, spec.binary
            );
        }
        _ => {}
    }
    // Auto-detection: probe API-backed providers in preference order.
    if let Ok(provider) = build_mistral_native_llm() {
        return (provider, "mistral-native");
    }
    if let Ok(provider) = build_openai_llm() {
        return (provider, "openai");
    }
    if let Ok(provider) = build_anthropic_llm() {
        return (provider, "anthropic");
    }
    if let Ok((provider, label)) = build_vertex_or_mistral_llm() {
        return (provider, label);
    }
    // Last resort before mock: any CLI-based provider found on PATH.
    {
        use crate::llm::cli_provider::{specs, CliProvider};
        for spec in specs::ALL {
            let candidate = CliProvider::new(*spec);
            if candidate.available() {
                return (Box::new(candidate), spec.provider_slug);
            }
        }
    }
    eprintln!("Warning: No LLM provider configured. Using mock.");
    eprintln!("  Set MISTRAL_API_KEY for the native Mistral API (recommended),");
    eprintln!("  or set OPENAI_API_KEY, ANTHROPIC_API_KEY, or GOOGLE_APPLICATION_CREDENTIALS.");
    mock()
}
/// Selects and constructs an embedding provider, returning it with a short
/// static label describing which backend was chosen.
///
/// `NOETHER_EMBEDDING_PROVIDER` forces a backend ("mock", "mistral",
/// "openai", or "vertex"). When unset, unknown, or when the forced backend
/// (other than "vertex") is unavailable, providers are probed in order:
/// Mistral native, OpenAI, Vertex AI, then a 128-dimension mock.
pub fn build_embedding_provider() -> (Box<dyn EmbeddingProvider>, &'static str) {
    let requested = std::env::var("NOETHER_EMBEDDING_PROVIDER").unwrap_or_default();
    // Shared constructor for the mock fallback.
    let mock = || -> (Box<dyn EmbeddingProvider>, &'static str) {
        (Box::new(MockEmbeddingProvider::new(128)), "mock")
    };
    match requested.as_str() {
        "mock" => return mock(),
        "mistral" => match MistralNativeEmbeddingProvider::from_env() {
            Ok(provider) => return (Box::new(provider), "mistral-native"),
            Err(e) => {
                eprintln!("Warning: Mistral native embeddings unavailable: {e}. Falling back.")
            }
        },
        "openai" => match build_openai_embedding() {
            Ok(provider) => return (provider, "openai"),
            Err(e) => eprintln!("Warning: OpenAI embeddings unavailable: {e}. Falling back."),
        },
        "vertex" => match build_vertex_embedding() {
            Ok(provider) => return (provider, "vertex"),
            Err(e) => {
                // An explicit vertex request that fails drops straight to
                // mock instead of probing the other backends.
                eprintln!("Warning: Vertex AI embeddings unavailable: {e}. Falling back to mock.");
                return mock();
            }
        },
        _ => {}
    }
    // Auto-detection: probe embedding backends in preference order.
    if let Ok(provider) = MistralNativeEmbeddingProvider::from_env() {
        return (Box::new(provider), "mistral-native");
    }
    if let Ok(provider) = build_openai_embedding() {
        return (provider, "openai");
    }
    if let Ok(provider) = build_vertex_embedding() {
        return (provider, "vertex");
    }
    mock()
}
/// Builds the native Mistral LLM provider from environment configuration,
/// boxed as a trait object. Fails when `from_env` cannot configure it.
fn build_mistral_native_llm() -> Result<Box<dyn LlmProvider>, String> {
    let provider = MistralNativeProvider::from_env()?;
    Ok(Box::new(provider))
}
/// Builds the OpenAI LLM provider from environment configuration,
/// boxed as a trait object. Fails when `from_env` cannot configure it.
fn build_openai_llm() -> Result<Box<dyn LlmProvider>, String> {
    let provider = OpenAiProvider::from_env()?;
    Ok(Box::new(provider))
}
/// Builds the Anthropic LLM provider from environment configuration,
/// boxed as a trait object. Fails when `from_env` cannot configure it.
fn build_anthropic_llm() -> Result<Box<dyn LlmProvider>, String> {
    let provider = AnthropicProvider::from_env()?;
    Ok(Box::new(provider))
}
/// Builds the OpenAI embedding provider from environment configuration,
/// boxed as a trait object. Fails when `from_env` cannot configure it.
fn build_openai_embedding() -> Result<Box<dyn EmbeddingProvider>, String> {
    let provider = OpenAiEmbeddingProvider::from_env()?;
    Ok(Box::new(provider))
}
/// Builds a Vertex AI-hosted LLM provider, labeled for diagnostics.
///
/// The model name comes from `VERTEX_AI_MODEL`, falling back to the default
/// `LlmConfig` model. Mistral-family models route to the Mistral-on-Vertex
/// provider ("mistral-vertex"); everything else uses the generic Vertex AI
/// provider ("vertex").
///
/// # Errors
/// Returns `Err` when `VertexAiConfig::from_env()` cannot assemble a config.
fn build_vertex_or_mistral_llm() -> Result<(Box<dyn LlmProvider>, &'static str), String> {
    // Move `model` out of the temporary default config instead of cloning it
    // (the previous `.clone()` was redundant — clippy::redundant_clone).
    let model = std::env::var("VERTEX_AI_MODEL")
        .unwrap_or_else(|_| crate::llm::LlmConfig::default().model);
    let config = VertexAiConfig::from_env()?;
    if is_mistral_model(&model) {
        Ok((Box::new(MistralLlmProvider::new(config)), "mistral-vertex"))
    } else {
        Ok((Box::new(VertexAiLlmProvider::new(config)), "vertex"))
    }
}
/// Builds the Vertex AI embedding provider from environment configuration.
///
/// Optional overrides: `VERTEX_AI_EMBEDDING_MODEL` (any value) and
/// `VERTEX_AI_EMBEDDING_DIMENSIONS` (ignored unless it parses as a number).
///
/// # Errors
/// Returns `Err` when `VertexAiConfig::from_env()` cannot assemble a config.
fn build_vertex_embedding() -> Result<Box<dyn EmbeddingProvider>, String> {
    let config = VertexAiConfig::from_env()?;
    let model_override = std::env::var("VERTEX_AI_EMBEDDING_MODEL").ok();
    let dims_override = match std::env::var("VERTEX_AI_EMBEDDING_DIMENSIONS") {
        Ok(raw) => raw.parse().ok(),
        Err(_) => None,
    };
    let provider = VertexAiEmbeddingProvider::new(config, model_override, dims_override);
    Ok(Box::new(provider))
}