// noether_engine/providers.rs
use crate::index::embedding::{EmbeddingProvider, MockEmbeddingProvider};
use crate::llm::anthropic::AnthropicProvider;
use crate::llm::mistral::{MistralNativeEmbeddingProvider, MistralNativeProvider};
use crate::llm::openai::{OpenAiEmbeddingProvider, OpenAiProvider};
use crate::llm::vertex::{
    MistralLlmProvider, VertexAiConfig, VertexAiEmbeddingProvider, VertexAiLlmProvider,
};
use crate::llm::{LlmProvider, MockLlmProvider};

/// Returns `true` when `model` names a Mistral-family model (including
/// Codestral), matched case-insensitively by substring.
fn is_mistral_model(model: &str) -> bool {
    let needle = model.to_lowercase();
    ["mistral", "codestral"].iter().any(|m| needle.contains(m))
}
55
56pub fn build_llm_provider() -> (Box<dyn LlmProvider>, &'static str) {
60 let forced = std::env::var("NOETHER_LLM_PROVIDER").unwrap_or_default();
61
62 match forced.as_str() {
63 "mock" => return (Box::new(MockLlmProvider::new("{}")), "mock"),
64 "mistral" => match build_mistral_native_llm() {
65 Ok(p) => return (p, "mistral-native"),
66 Err(e) => {
67 eprintln!("Warning: Mistral native LLM unavailable: {e}. Falling back.");
68 }
69 },
70 "openai" => match build_openai_llm() {
71 Ok(p) => return (p, "openai"),
72 Err(e) => {
73 eprintln!("Warning: OpenAI LLM unavailable: {e}. Falling back.");
74 }
75 },
76 "anthropic" => match build_anthropic_llm() {
77 Ok(p) => return (p, "anthropic"),
78 Err(e) => {
79 eprintln!("Warning: Anthropic LLM unavailable: {e}. Falling back.");
80 }
81 },
82 "vertex" => match build_vertex_or_mistral_llm() {
83 Ok((p, name)) => return (p, name),
84 Err(e) => {
85 eprintln!("Warning: Vertex AI LLM unavailable: {e}. Falling back to mock.");
86 return (Box::new(MockLlmProvider::new("{}")), "mock");
87 }
88 },
89 _ => {} }
91
92 if let Ok(p) = build_mistral_native_llm() {
94 return (p, "mistral-native");
95 }
96 if let Ok(p) = build_openai_llm() {
97 return (p, "openai");
98 }
99 if let Ok(p) = build_anthropic_llm() {
100 return (p, "anthropic");
101 }
102 if let Ok((p, name)) = build_vertex_or_mistral_llm() {
103 return (p, name);
104 }
105 eprintln!("Warning: No LLM provider configured. Using mock.");
106 eprintln!(" Set MISTRAL_API_KEY for the native Mistral API (recommended),");
107 eprintln!(" or set OPENAI_API_KEY, ANTHROPIC_API_KEY, or GOOGLE_APPLICATION_CREDENTIALS.");
108 (Box::new(MockLlmProvider::new("{}")), "mock")
109}
110
111pub fn build_embedding_provider() -> (Box<dyn EmbeddingProvider>, &'static str) {
115 let forced = std::env::var("NOETHER_EMBEDDING_PROVIDER").unwrap_or_default();
116
117 match forced.as_str() {
118 "mock" => return (Box::new(MockEmbeddingProvider::new(128)), "mock"),
119 "mistral" => match MistralNativeEmbeddingProvider::from_env() {
120 Ok(p) => return (Box::new(p), "mistral-native"),
121 Err(e) => {
122 eprintln!("Warning: Mistral native embeddings unavailable: {e}. Falling back.");
123 }
124 },
125 "openai" => match build_openai_embedding() {
126 Ok(p) => return (p, "openai"),
127 Err(e) => {
128 eprintln!("Warning: OpenAI embeddings unavailable: {e}. Falling back.");
129 }
130 },
131 "vertex" => match build_vertex_embedding() {
132 Ok(p) => return (p, "vertex"),
133 Err(e) => {
134 eprintln!("Warning: Vertex AI embeddings unavailable: {e}. Falling back to mock.");
135 return (Box::new(MockEmbeddingProvider::new(128)), "mock");
136 }
137 },
138 _ => {} }
140
141 if let Ok(p) = MistralNativeEmbeddingProvider::from_env() {
143 return (Box::new(p), "mistral-native");
144 }
145 if let Ok(p) = build_openai_embedding() {
146 return (p, "openai");
147 }
148 if let Ok(p) = build_vertex_embedding() {
149 return (p, "vertex");
150 }
151 (Box::new(MockEmbeddingProvider::new(128)), "mock")
152}
153
154fn build_mistral_native_llm() -> Result<Box<dyn LlmProvider>, String> {
157 Ok(Box::new(MistralNativeProvider::from_env()?))
158}
159
160fn build_openai_llm() -> Result<Box<dyn LlmProvider>, String> {
161 Ok(Box::new(OpenAiProvider::from_env()?))
162}
163
164fn build_anthropic_llm() -> Result<Box<dyn LlmProvider>, String> {
165 Ok(Box::new(AnthropicProvider::from_env()?))
166}
167
168fn build_openai_embedding() -> Result<Box<dyn EmbeddingProvider>, String> {
169 Ok(Box::new(OpenAiEmbeddingProvider::from_env()?))
170}
171
172fn build_vertex_or_mistral_llm() -> Result<(Box<dyn LlmProvider>, &'static str), String> {
173 let model = std::env::var("VERTEX_AI_MODEL")
174 .unwrap_or_else(|_| crate::llm::LlmConfig::default().model.clone());
175 let config = VertexAiConfig::from_env()?;
176
177 if is_mistral_model(&model) {
178 Ok((Box::new(MistralLlmProvider::new(config)), "mistral-vertex"))
179 } else {
180 Ok((Box::new(VertexAiLlmProvider::new(config)), "vertex"))
181 }
182}
183
184fn build_vertex_embedding() -> Result<Box<dyn EmbeddingProvider>, String> {
185 let config = VertexAiConfig::from_env()?;
186 let model = std::env::var("VERTEX_AI_EMBEDDING_MODEL").ok();
187 let dimensions = std::env::var("VERTEX_AI_EMBEDDING_DIMENSIONS")
188 .ok()
189 .and_then(|s| s.parse().ok());
190 Ok(Box::new(VertexAiEmbeddingProvider::new(
191 config, model, dimensions,
192 )))
193}