// offline_intelligence/api/mode_api.rs
use axum::{
    extract::State,
    http::StatusCode,
    response::IntoResponse,
    Json,
};
use serde::{Deserialize, Serialize};
use tracing::{error, info};

use crate::{
    memory_db::ApiKeyType,
    shared_state::UnifiedAppState,
};
20
/// Request body for the mode-switch endpoint: which inference mode to activate.
#[derive(Debug, Deserialize)]
pub struct SwitchModeRequest {
    // Target mode; matched case-insensitively against "offline" / "online"
    // in `switch_mode`. Any other value yields 400 Bad Request.
    pub mode: String,
}
26
27#[derive(Debug, Serialize)]
29pub struct SwitchModeResponse {
30 pub success: bool,
31 pub mode: String,
32 pub engine_status: String, pub message: String,
34 #[serde(default)]
37 pub requires_api_key: bool,
38}
39
/// Snapshot of the current inference setup, returned by `get_mode_status`.
#[derive(Debug, Serialize)]
pub struct ModeStatusResponse {
    // Best-effort inferred mode: "offline" when the local engine is running,
    // otherwise "online" (see the heuristic in `get_mode_status`).
    pub current_mode: String,
    // Whether a local llama.cpp engine is installed (shared atomic flag).
    pub engine_available: bool,
    // "running", "stopped", or "unavailable" (no runtime manager).
    pub engine_status: String,
    // Whether a Hugging Face token is stored in the key store.
    pub hf_token_set: bool,
    // Whether an OpenRouter API key is stored in the key store.
    pub openrouter_key_set: bool,
    // Count of installed models whose download source is not "openrouter".
    pub installed_models_count: usize,
    // Count of registered models sourced from "openrouter".
    pub openrouter_models_count: usize,
}
51
52pub async fn switch_mode(
54 State(state): State<UnifiedAppState>,
55 Json(payload): Json<SwitchModeRequest>,
56) -> Result<impl IntoResponse, StatusCode> {
57 let mode = payload.mode.to_lowercase();
58
59 match mode.as_str() {
60 "offline" => {
61 info!("Switching to OFFLINE mode - starting local engine");
62
63 let engine_available = state.shared_state.engine_available.load(std::sync::atomic::Ordering::Relaxed);
65 if !engine_available {
66 return Ok(Json(SwitchModeResponse {
67 success: false,
68 mode: "offline".to_string(),
69 engine_status: "unavailable".to_string(),
70 message: "No llama.cpp engine installed. Please wait for auto-download or install manually.".to_string(),
71 requires_api_key: false,
72 }));
73 }
74
75 if let Ok(Some(_)) = state.shared_state.database_pool.api_keys.get_key_plaintext(&ApiKeyType::HuggingFace) {
77 let _ = state.shared_state.database_pool.api_keys.mark_used(ApiKeyType::HuggingFace, "offline");
78 }
79
80 let runtime_manager = {
82 let guard = state.shared_state.runtime_manager.read()
83 .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
84 guard.clone()
85 };
86
87 if let Some(rm) = runtime_manager {
88 let is_ready = rm.is_ready().await;
89 Ok(Json(SwitchModeResponse {
90 success: true,
91 mode: "offline".to_string(),
92 engine_status: if is_ready { "running" } else { "starting" }.to_string(),
93 message: "Switched to offline mode. Using local engine.".to_string(),
94 requires_api_key: false,
95 }))
96 } else {
97 Ok(Json(SwitchModeResponse {
98 success: false,
99 mode: "offline".to_string(),
100 engine_status: "unavailable".to_string(),
101 message: "Runtime manager not initialized".to_string(),
102 requires_api_key: false,
103 }))
104 }
105 }
106 "online" => {
107 info!("Switching to ONLINE mode - using OpenRouter API");
108
109 let openrouter_key_exists = state.shared_state.database_pool.api_keys
111 .get_key_plaintext(&ApiKeyType::OpenRouter)
112 .ok()
113 .flatten()
114 .is_some();
115
116 if openrouter_key_exists {
118 let _ = state.shared_state.database_pool.api_keys.mark_used(ApiKeyType::OpenRouter, "online");
119 }
120
121 Ok(Json(SwitchModeResponse {
124 success: true,
125 mode: "online".to_string(),
126 engine_status: "not_needed".to_string(),
127 message: if openrouter_key_exists {
128 "Switched to online mode. Using OpenRouter API.".to_string()
129 } else {
130 "Switched to online mode. Add your OpenRouter API key to start chatting.".to_string()
131 },
132 requires_api_key: !openrouter_key_exists,
133 }))
134 }
135 _ => Err(StatusCode::BAD_REQUEST),
136 }
137}
138
139pub async fn get_mode_status(
141 State(state): State<UnifiedAppState>,
142) -> Result<impl IntoResponse, StatusCode> {
143 let engine_available = state.shared_state.engine_available.load(std::sync::atomic::Ordering::Relaxed);
145
146 let runtime_manager = {
148 let guard = state.shared_state.runtime_manager.read()
149 .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
150 guard.clone()
151 };
152
153 let engine_status = if let Some(rm) = runtime_manager {
154 if rm.is_ready().await {
155 "running"
156 } else {
157 "stopped"
158 }
159 } else {
160 "unavailable"
161 };
162
163 let current_mode = if engine_status == "running" {
165 "offline"
166 } else {
167 "online" };
169
170 let hf_token_set = state.shared_state.database_pool.api_keys
172 .get_key_plaintext(&ApiKeyType::HuggingFace)
173 .ok()
174 .flatten()
175 .is_some();
176
177 let openrouter_key_set = state.shared_state.database_pool.api_keys
178 .get_key_plaintext(&ApiKeyType::OpenRouter)
179 .ok()
180 .flatten()
181 .is_some();
182
183 let (installed_models_count, openrouter_models_count) = if let Some(ref model_manager) = state.shared_state.model_manager {
185 let registry = model_manager.registry.read().await;
186 let all_models = registry.list_models();
187 let installed = all_models.iter().filter(|m|
188 matches!(m.status, crate::model_management::registry::ModelStatus::Installed)
189 && m.download_source.as_deref() != Some("openrouter")
190 ).count();
191 let openrouter = all_models.iter().filter(|m|
192 m.download_source.as_deref() == Some("openrouter")
193 ).count();
194 (installed, openrouter)
195 } else {
196 (0, 0)
197 };
198
199 Ok(Json(ModeStatusResponse {
200 current_mode: current_mode.to_string(),
201 engine_available,
202 engine_status: engine_status.to_string(),
203 hf_token_set,
204 openrouter_key_set,
205 installed_models_count,
206 openrouter_models_count,
207 }))
208}