//! Application state and configuration for the LLM Edge Agent.
//!
//! Wires together the cache layers and the configured LLM provider adapters.

use llm_edge_cache::{l2::L2Config, CacheManager};
use llm_edge_providers::{anthropic::AnthropicAdapter, openai::OpenAIAdapter, LLMProvider};
use std::sync::Arc;
use tracing::{info, warn};

/// Shared application state for the LLM Edge Agent server.
#[derive(Clone)]
pub struct AppState {
    /// Cache manager for the L1 (in-memory) and optional L2 (Redis) layers.
    pub cache_manager: Arc<CacheManager>,

    /// OpenAI provider adapter, present only when an API key is configured.
    pub openai_provider: Option<Arc<dyn LLMProvider>>,

    /// Anthropic provider adapter, present only when an API key is configured.
    pub anthropic_provider: Option<Arc<dyn LLMProvider>>,

    /// Application configuration the state was built from.
    pub config: Arc<AppConfig>,
}

/// Application configuration, typically loaded from environment variables.
#[derive(Debug, Clone)]
pub struct AppConfig {
    /// Host address the server binds to.
    pub host: String,

    /// Port the server listens on.
    pub port: u16,

    /// Whether the Redis-backed L2 cache is enabled.
    pub enable_l2_cache: bool,

    /// Redis connection URL, required when the L2 cache is enabled.
    pub redis_url: Option<String>,

    /// API key for the OpenAI provider.
    pub openai_api_key: Option<String>,

    /// API key for the Anthropic provider.
    pub anthropic_api_key: Option<String>,

    /// Whether tracing is enabled.
    pub enable_tracing: bool,

    /// Whether the metrics endpoint is enabled.
    pub enable_metrics: bool,

    /// Port the metrics endpoint listens on.
    pub metrics_port: u16,
}

impl Default for AppConfig {
    fn default() -> Self {
        Self {
            host: "0.0.0.0".to_string(),
            port: 8080,
            enable_l2_cache: false,
            redis_url: None,
            openai_api_key: None,
            anthropic_api_key: None,
            enable_tracing: true,
            enable_metrics: true,
            metrics_port: 9090,
        }
    }
}

impl AppConfig {
    /// Loads configuration from environment variables, falling back to the
    /// defaults above when a variable is unset or fails to parse.
    pub fn from_env() -> Self {
        Self {
            host: std::env::var("HOST").unwrap_or_else(|_| "0.0.0.0".to_string()),
            port: std::env::var("PORT")
                .ok()
                .and_then(|p| p.parse().ok())
                .unwrap_or(8080),
            enable_l2_cache: std::env::var("ENABLE_L2_CACHE")
                .ok()
                .and_then(|v| v.parse().ok())
                .unwrap_or(false),
            redis_url: std::env::var("REDIS_URL").ok(),
            openai_api_key: std::env::var("OPENAI_API_KEY").ok(),
            anthropic_api_key: std::env::var("ANTHROPIC_API_KEY").ok(),
            enable_tracing: std::env::var("ENABLE_TRACING")
                .ok()
                .and_then(|v| v.parse().ok())
                .unwrap_or(true),
            enable_metrics: std::env::var("ENABLE_METRICS")
                .ok()
                .and_then(|v| v.parse().ok())
                .unwrap_or(true),
            metrics_port: std::env::var("METRICS_PORT")
                .ok()
                .and_then(|p| p.parse().ok())
                .unwrap_or(9090),
        }
    }
}

/// Initializes the application state: the cache layers and any provider
/// adapters for which an API key is configured.
///
/// Returns an error if no LLM provider can be configured.
pub async fn initialize_app_state(config: AppConfig) -> anyhow::Result<AppState> {
    info!("Initializing LLM Edge Agent application state");

    // Cache layer: L1 is always available; L2 additionally requires a Redis URL.
    info!("Initializing cache layer");
    let cache_manager = if config.enable_l2_cache {
        if let Some(ref redis_url) = config.redis_url {
            info!("L2 cache enabled with Redis: {}", redis_url);
            let l2_config = L2Config {
                redis_url: redis_url.clone(),
                ttl_seconds: 3600,
                connection_timeout_ms: 1000,
                operation_timeout_ms: 100,
                key_prefix: "llm-edge:".to_string(),
            };
            Arc::new(CacheManager::with_l2(l2_config).await)
        } else {
            warn!("L2 cache enabled but no Redis URL provided, using L1 only");
            Arc::new(CacheManager::new())
        }
    } else {
        info!("Using L1 cache only (in-memory)");
        Arc::new(CacheManager::new())
    };

    // Provider adapters: each is optional and gated on its API key.
    info!("Initializing provider adapters");

    let openai_provider: Option<Arc<dyn LLMProvider>> =
        if let Some(ref api_key) = config.openai_api_key {
            info!("Initializing OpenAI provider");
            Some(Arc::new(OpenAIAdapter::new(api_key.clone())))
        } else {
            warn!("OpenAI API key not provided, OpenAI provider will not be available");
            None
        };

    let anthropic_provider: Option<Arc<dyn LLMProvider>> =
        if let Some(ref api_key) = config.anthropic_api_key {
            info!("Initializing Anthropic provider");
            Some(Arc::new(AnthropicAdapter::new(api_key.clone())))
        } else {
            warn!("Anthropic API key not provided, Anthropic provider will not be available");
            None
        };

    // Refuse to start without at least one provider.
    if openai_provider.is_none() && anthropic_provider.is_none() {
        return Err(anyhow::anyhow!(
            "No LLM providers configured. Please set OPENAI_API_KEY or ANTHROPIC_API_KEY"
        ));
    }

    let app_state = AppState {
        cache_manager,
        openai_provider,
        anthropic_provider,
        config: Arc::new(config),
    };

    info!("Application state initialized successfully");
    Ok(app_state)
}

/// Checks the health of the cache layers and of each configured provider.
pub async fn check_system_health(state: &AppState) -> SystemHealthStatus {
    let cache_health = state.cache_manager.health_check().await;

    let openai_healthy = if let Some(ref provider) = state.openai_provider {
        matches!(
            provider.health().await,
            llm_edge_providers::adapter::HealthStatus::Healthy
        )
    } else {
        false
    };

    let anthropic_healthy = if let Some(ref provider) = state.anthropic_provider {
        matches!(
            provider.health().await,
            llm_edge_providers::adapter::HealthStatus::Healthy
        )
    } else {
        false
    };

    SystemHealthStatus {
        cache_l1_healthy: cache_health.l1_healthy,
        cache_l2_healthy: cache_health.l2_healthy,
        cache_l2_configured: cache_health.l2_configured,
        openai_healthy,
        openai_configured: state.openai_provider.is_some(),
        anthropic_healthy,
        anthropic_configured: state.anthropic_provider.is_some(),
    }
}

/// Aggregated health of the cache layers and the provider adapters.
#[derive(Debug, Clone)]
pub struct SystemHealthStatus {
    pub cache_l1_healthy: bool,
    pub cache_l2_healthy: bool,
    pub cache_l2_configured: bool,
    pub openai_healthy: bool,
    pub openai_configured: bool,
    pub anthropic_healthy: bool,
    pub anthropic_configured: bool,
}

impl SystemHealthStatus {
    /// Overall health: L1 must be healthy, L2 must be healthy whenever it is
    /// configured, and at least one provider must be healthy.
    pub fn is_healthy(&self) -> bool {
        // L2 only counts against health when it is actually configured.
        let cache_healthy =
            self.cache_l1_healthy && (!self.cache_l2_configured || self.cache_l2_healthy);

        let provider_healthy = self.openai_healthy || self.anthropic_healthy;

        cache_healthy && provider_healthy
    }

    /// Human-readable status for health endpoints: "healthy" or "degraded".
    pub fn status_string(&self) -> String {
        if self.is_healthy() {
            "healthy".to_string()
        } else {
            "degraded".to_string()
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_app_config_default() {
        let config = AppConfig::default();
        assert_eq!(config.host, "0.0.0.0");
        assert_eq!(config.port, 8080);
        assert!(!config.enable_l2_cache);
    }

    #[test]
    fn test_system_health_all_healthy() {
        let status = SystemHealthStatus {
            cache_l1_healthy: true,
            cache_l2_healthy: true,
            cache_l2_configured: true,
            openai_healthy: true,
            openai_configured: true,
            anthropic_healthy: false,
            anthropic_configured: false,
        };

        assert!(status.is_healthy());
        assert_eq!(status.status_string(), "healthy");
    }

    #[test]
    fn test_system_health_degraded() {
        let status = SystemHealthStatus {
            cache_l1_healthy: true,
            cache_l2_healthy: false,
            cache_l2_configured: true,
            openai_healthy: false,
            openai_configured: true,
            anthropic_healthy: false,
            anthropic_configured: false,
        };

        assert!(!status.is_healthy());
        assert_eq!(status.status_string(), "degraded");
    }
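
    // Added case: an unhealthy L1 cache marks the system degraded even when a
    // provider is healthy, mirroring the cache gating in `is_healthy`.
    #[test]
    fn test_system_health_l1_cache_unhealthy() {
        let status = SystemHealthStatus {
            cache_l1_healthy: false,
            cache_l2_healthy: false,
            cache_l2_configured: false,
            openai_healthy: true,
            openai_configured: true,
            anthropic_healthy: false,
            anthropic_configured: false,
        };

        assert!(!status.is_healthy());
        assert_eq!(status.status_string(), "degraded");
    }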

    #[test]
    fn test_system_health_l2_not_configured() {
        let status = SystemHealthStatus {
            cache_l1_healthy: true,
            cache_l2_healthy: false,
            cache_l2_configured: false,
            openai_healthy: true,
            openai_configured: true,
            anthropic_healthy: false,
            anthropic_configured: false,
        };

        assert!(status.is_healthy());
    }
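
    // Hedged smoke test (assumes `tokio` with the `macros` and `rt` features is
    // available as a dev-dependency): with no API keys set, initialization
    // should fail instead of starting without any provider.
    #[tokio::test]
    async fn test_initialize_app_state_without_providers_fails() {
        let config = AppConfig::default();
        assert!(initialize_app_state(config).await.is_err());
    }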
}