noether_engine/
providers.rs1use crate::index::embedding::{EmbeddingProvider, MockEmbeddingProvider};
42use crate::llm::anthropic::AnthropicProvider;
43use crate::llm::mistral::{MistralNativeEmbeddingProvider, MistralNativeProvider};
44use crate::llm::openai::{OpenAiEmbeddingProvider, OpenAiProvider};
45use crate::llm::vertex::{
46 MistralLlmProvider, VertexAiConfig, VertexAiEmbeddingProvider, VertexAiLlmProvider,
47};
48use crate::llm::{LlmProvider, MockLlmProvider};
49
/// Returns `true` when `model` names a Mistral-family model, i.e. the name
/// contains "mistral" or "codestral" (case-insensitive substring match).
fn is_mistral_model(model: &str) -> bool {
    let normalized = model.to_lowercase();
    ["mistral", "codestral"]
        .iter()
        .any(|needle| normalized.contains(needle))
}
55
56pub fn build_llm_provider() -> (Box<dyn LlmProvider>, &'static str) {
60 let forced = std::env::var("NOETHER_LLM_PROVIDER").unwrap_or_default();
61
62 match forced.as_str() {
63 "mock" => return (Box::new(MockLlmProvider::new("{}")), "mock"),
64 "mistral" => match build_mistral_native_llm() {
65 Ok(p) => return (p, "mistral-native"),
66 Err(e) => {
67 eprintln!("Warning: Mistral native LLM unavailable: {e}. Falling back.");
68 }
69 },
70 "openai" => match build_openai_llm() {
71 Ok(p) => return (p, "openai"),
72 Err(e) => {
73 eprintln!("Warning: OpenAI LLM unavailable: {e}. Falling back.");
74 }
75 },
76 "anthropic" => match build_anthropic_llm() {
77 Ok(p) => return (p, "anthropic"),
78 Err(e) => {
79 eprintln!("Warning: Anthropic LLM unavailable: {e}. Falling back.");
80 }
81 },
82 "vertex" => match build_vertex_or_mistral_llm() {
83 Ok((p, name)) => return (p, name),
84 Err(e) => {
85 eprintln!("Warning: Vertex AI LLM unavailable: {e}. Falling back to mock.");
86 return (Box::new(MockLlmProvider::new("{}")), "mock");
87 }
88 },
89 "claude-cli" | "gemini-cli" | "cursor-cli" | "opencode" => {
90 use crate::llm::cli_provider::{specs, CliProvider};
93 let spec = match forced.as_str() {
94 "claude-cli" => specs::CLAUDE,
95 "gemini-cli" => specs::GEMINI,
96 "cursor-cli" => specs::CURSOR,
97 "opencode" => specs::OPENCODE,
98 _ => unreachable!(),
99 };
100 let provider = CliProvider::new(spec);
101 if provider.available() {
102 return (Box::new(provider), spec.provider_slug);
103 }
104 eprintln!(
105 "Warning: NOETHER_LLM_PROVIDER={} but `{}` binary not found on PATH \
106 (or NOETHER_LLM_SKIP_CLI is set).",
107 forced, spec.binary
108 );
109 }
110 _ => {} }
112
113 if let Ok(p) = build_mistral_native_llm() {
115 return (p, "mistral-native");
116 }
117 if let Ok(p) = build_openai_llm() {
118 return (p, "openai");
119 }
120 if let Ok(p) = build_anthropic_llm() {
121 return (p, "anthropic");
122 }
123 if let Ok((p, name)) = build_vertex_or_mistral_llm() {
124 return (p, name);
125 }
126 {
131 use crate::llm::cli_provider::{specs, CliProvider};
132 for spec in specs::ALL {
133 let provider = CliProvider::new(*spec);
134 if provider.available() {
135 return (Box::new(provider), spec.provider_slug);
136 }
137 }
138 }
139 eprintln!("Warning: No LLM provider configured. Using mock.");
140 eprintln!(" Set MISTRAL_API_KEY for the native Mistral API (recommended),");
141 eprintln!(" or set OPENAI_API_KEY, ANTHROPIC_API_KEY, or GOOGLE_APPLICATION_CREDENTIALS.");
142 (Box::new(MockLlmProvider::new("{}")), "mock")
143}
144
145pub fn build_embedding_provider() -> (Box<dyn EmbeddingProvider>, &'static str) {
149 let forced = std::env::var("NOETHER_EMBEDDING_PROVIDER").unwrap_or_default();
150
151 match forced.as_str() {
152 "mock" => return (Box::new(MockEmbeddingProvider::new(128)), "mock"),
153 "mistral" => match MistralNativeEmbeddingProvider::from_env() {
154 Ok(p) => return (Box::new(p), "mistral-native"),
155 Err(e) => {
156 eprintln!("Warning: Mistral native embeddings unavailable: {e}. Falling back.");
157 }
158 },
159 "openai" => match build_openai_embedding() {
160 Ok(p) => return (p, "openai"),
161 Err(e) => {
162 eprintln!("Warning: OpenAI embeddings unavailable: {e}. Falling back.");
163 }
164 },
165 "vertex" => match build_vertex_embedding() {
166 Ok(p) => return (p, "vertex"),
167 Err(e) => {
168 eprintln!("Warning: Vertex AI embeddings unavailable: {e}. Falling back to mock.");
169 return (Box::new(MockEmbeddingProvider::new(128)), "mock");
170 }
171 },
172 _ => {} }
174
175 if let Ok(p) = MistralNativeEmbeddingProvider::from_env() {
177 return (Box::new(p), "mistral-native");
178 }
179 if let Ok(p) = build_openai_embedding() {
180 return (p, "openai");
181 }
182 if let Ok(p) = build_vertex_embedding() {
183 return (p, "vertex");
184 }
185 (Box::new(MockEmbeddingProvider::new(128)), "mock")
186}
187
188fn build_mistral_native_llm() -> Result<Box<dyn LlmProvider>, String> {
191 Ok(Box::new(MistralNativeProvider::from_env()?))
192}
193
194fn build_openai_llm() -> Result<Box<dyn LlmProvider>, String> {
195 Ok(Box::new(OpenAiProvider::from_env()?))
196}
197
198fn build_anthropic_llm() -> Result<Box<dyn LlmProvider>, String> {
199 Ok(Box::new(AnthropicProvider::from_env()?))
200}
201
202fn build_openai_embedding() -> Result<Box<dyn EmbeddingProvider>, String> {
203 Ok(Box::new(OpenAiEmbeddingProvider::from_env()?))
204}
205
206fn build_vertex_or_mistral_llm() -> Result<(Box<dyn LlmProvider>, &'static str), String> {
207 let model = std::env::var("VERTEX_AI_MODEL")
208 .unwrap_or_else(|_| crate::llm::LlmConfig::default().model.clone());
209 let config = VertexAiConfig::from_env()?;
210
211 if is_mistral_model(&model) {
212 Ok((Box::new(MistralLlmProvider::new(config)), "mistral-vertex"))
213 } else {
214 Ok((Box::new(VertexAiLlmProvider::new(config)), "vertex"))
215 }
216}
217
218fn build_vertex_embedding() -> Result<Box<dyn EmbeddingProvider>, String> {
219 let config = VertexAiConfig::from_env()?;
220 let model = std::env::var("VERTEX_AI_EMBEDDING_MODEL").ok();
221 let dimensions = std::env::var("VERTEX_AI_EMBEDDING_DIMENSIONS")
222 .ok()
223 .and_then(|s| s.parse().ok());
224 Ok(Box::new(VertexAiEmbeddingProvider::new(
225 config, model, dimensions,
226 )))
227}