1use anyhow::Result;
23use axum::{
24 extract::{Query, Request, State, WebSocketUpgrade},
25 http::StatusCode,
26 middleware::{self, Next},
27 response::IntoResponse,
28 routing::{get, post},
29 Json, Router,
30};
31use serde::{Deserialize, Serialize};
32use std::collections::HashMap;
33use std::net::SocketAddr;
34use std::path::PathBuf;
35use std::sync::Arc;
36use tokio::sync::oneshot;
37use tokio::sync::RwLock;
38
39use crate::proxy::memory::ProxyMemory;
41use crate::proxy::openai_compat::{
42 OpenAiChoice, OpenAiError, OpenAiErrorResponse, OpenAiRequest, OpenAiResponse,
43 OpenAiResponseMessage, OpenAiUsage,
44};
45use crate::proxy::{LlmMessage, LlmProxy, LlmRequest, LlmRole};
46
47use crate::auth::{create_session_store, GitHubOAuthConfig, SharedSessionStore};
49use crate::collaboration::{create_hub, SharedCollabHub};
50
51use crate::hot_watcher::HotWatcher;
53
54use crate::web_dashboard::mcp_http::{create_mcp_context, mcp_router};
56
57pub fn token_path() -> PathBuf {
64 if let Ok(p) = std::env::var("ST_TOKEN_PATH") {
65 return PathBuf::from(p);
66 }
67 dirs::home_dir()
68 .unwrap_or_else(|| PathBuf::from("/tmp"))
69 .join(".st")
70 .join("daemon.token")
71}
72
73pub fn load_or_create_token() -> Result<String> {
76 let path = token_path();
77
78 if path.exists() {
80 let token = std::fs::read_to_string(&path)?.trim().to_string();
81 if !token.is_empty() {
82 return Ok(token);
83 }
84 }
85
86 use rand::Rng;
88 let mut rng = rand::thread_rng();
89 let bytes: Vec<u8> = (0..32).map(|_| rng.gen::<u8>()).collect();
90 let token = hex::encode(&bytes);
91
92 if let Some(parent) = path.parent() {
94 std::fs::create_dir_all(parent)?;
95 }
96
97 std::fs::write(&path, &token)?;
99 #[cfg(unix)]
100 {
101 use std::os::unix::fs::PermissionsExt;
102 let mode = if path.starts_with("/var/lib") {
105 0o644
106 } else {
107 0o600
108 };
109 std::fs::set_permissions(&path, std::fs::Permissions::from_mode(mode))?;
110 }
111
112 println!(" ๐ Generated new daemon auth token at {}", path.display());
113 Ok(token)
114}
115
116pub fn load_token() -> Option<String> {
119 load_all_tokens().into_iter().next()
120}
121
122pub fn load_all_tokens() -> Vec<String> {
124 let mut tokens = Vec::new();
125
126 let system_path = std::path::PathBuf::from("/var/lib/smart-tree/daemon.token");
128 if let Ok(token) = std::fs::read_to_string(&system_path) {
129 let t = token.trim().to_string();
130 if !t.is_empty() {
131 tokens.push(t);
132 }
133 }
134
135 let path = token_path();
137 if let Ok(token) = std::fs::read_to_string(&path) {
138 let t = token.trim().to_string();
139 if !t.is_empty() && !tokens.contains(&t) {
140 tokens.push(t);
141 }
142 }
143
144 tokens
145}
146
147async fn auth_middleware(
149 State(expected_tokens): State<Vec<String>>,
150 req: Request,
151 next: Next,
152) -> impl IntoResponse {
153 if req.uri().path() == "/health" {
155 return next.run(req).await;
156 }
157
158 let auth_header = req
160 .headers()
161 .get("authorization")
162 .and_then(|v| v.to_str().ok());
163
164 match auth_header {
165 Some(header) if header.starts_with("Bearer ") => {
166 let provided = &header[7..];
167 if expected_tokens.iter().any(|t| t == provided) {
168 next.run(req).await
169 } else {
170 (StatusCode::UNAUTHORIZED, "Invalid token").into_response()
171 }
172 }
173 _ => (StatusCode::UNAUTHORIZED, "Bearer token required").into_response(),
174 }
175}
176
/// Runtime configuration for the Smart Tree daemon.
#[derive(Debug, Clone)]
pub struct DaemonConfig {
    /// TCP port the HTTP server listens on (default 28428).
    pub port: u16,
    /// Directories scanned for project context (see `scan_system_context`).
    pub watch_paths: Vec<PathBuf>,
    /// Upstream credits endpoint; presumably a WebSocket orchestrator — the
    /// visible code never reads it. TODO confirm usage elsewhere.
    pub orchestrator_url: Option<String>,
    /// Whether Foken credit tracking is enabled. NOTE(review): not read by
    /// any code in this file — verify against other modules.
    pub enable_credits: bool,
    /// When true the server binds 0.0.0.0; otherwise localhost only.
    pub allow_external: bool,
}
191
impl Default for DaemonConfig {
    /// Safe local defaults: fixed port 28428, no watch paths, credits on,
    /// hosted orchestrator URL, and localhost-only binding.
    fn default() -> Self {
        Self {
            port: 28428,
            watch_paths: vec![],
            orchestrator_url: Some("wss://gpu.foken.ai/api/credits".to_string()),
            enable_credits: true,
            allow_external: false,
        }
    }
}
203
/// All mutable daemon state, shared across handlers behind `Arc<RwLock<_>>`.
pub struct DaemonState {
    /// Scanned system-wide project/directory context.
    pub context: SystemContext,
    /// Foken credit ledger (token-savings accounting).
    pub credits: CreditTracker,
    /// Configuration the daemon was started with.
    pub config: DaemonConfig,
    /// One-shot sender that triggers graceful shutdown; `Option` so it can be
    /// taken (consumed) once — presumably by the /shutdown handler. TODO confirm.
    pub shutdown_tx: Option<oneshot::Sender<()>>,
    /// OpenAI-compatible LLM proxy with its configured providers.
    pub llm_proxy: LlmProxy,
    /// Proxy conversation memory (persistent, or in-memory fallback).
    pub proxy_memory: ProxyMemory,
    /// Collaboration hub shared with the /collab endpoints.
    pub collab_hub: SharedCollabHub,
    /// Web session store for dashboard auth.
    pub sessions: SharedSessionStore,
    /// GitHub OAuth config, present only when env vars are set.
    pub github_oauth: Option<GitHubOAuthConfig>,
    /// Real-time directory watcher ("MEM8 waves") behind its own lock.
    pub hot_watcher: Arc<RwLock<HotWatcher>>,
}
227
/// Aggregated results of scanning the configured watch paths.
#[derive(Debug, Default)]
pub struct SystemContext {
    /// Detected projects keyed by their root path.
    pub projects: HashMap<PathBuf, ProjectInfo>,
    /// Per-directory info keyed by path ("consciousness" in project jargon).
    pub consciousnesses: HashMap<PathBuf, DirectoryInfo>,
    /// When the last scan completed; `None` before the first scan.
    pub last_scan: Option<std::time::SystemTime>,
}
238
/// Summary of one detected project, serialized by the /context endpoints.
#[derive(Debug, Clone, Serialize)]
pub struct ProjectInfo {
    /// Project root path as a string.
    pub path: String,
    /// Project name (matched against queries in `query_context`).
    pub name: String,
    /// Kind of project (e.g. language/ecosystem). TODO confirm values at scan site.
    pub project_type: String,
    /// Notable files, relative to `path` (joined as "path/file" in queries).
    pub key_files: Vec<String>,
    /// Free-text description; also matched against search queries.
    pub essence: String,
}
247
/// Per-directory scan statistics.
#[derive(Debug, Clone, Serialize)]
pub struct DirectoryInfo {
    /// Directory path as a string.
    pub path: String,
    /// Activity metric; semantics set by the scanner — TODO confirm units.
    pub frequency: f64,
    /// Number of files observed in the directory.
    pub file_count: usize,
    /// Detected patterns; populated by the scanner. TODO confirm contents.
    pub patterns: Vec<String>,
}
255
/// Ledger of Foken credits earned from token savings.
#[derive(Debug, Default)]
pub struct CreditTracker {
    /// Current spendable balance (earned minus spent).
    pub balance: f64,
    /// Lifetime credits earned.
    pub total_earned: f64,
    /// Lifetime credits spent. NOTE(review): never written in this file.
    pub total_spent: f64,
    /// Full transaction history, oldest first.
    pub transactions: Vec<Transaction>,
}
264
/// One ledger entry in the credit history.
#[derive(Debug, Clone, Serialize)]
pub struct Transaction {
    /// RFC 3339 UTC timestamp of when the entry was recorded.
    pub timestamp: String,
    /// Credit delta (positive for savings recorded via `record_savings`).
    pub amount: f64,
    /// Human-readable reason for the entry.
    pub description: String,
}
271
272impl CreditTracker {
273 pub fn record_savings(&mut self, tokens_saved: u64, description: &str) {
274 let amount = tokens_saved as f64;
275 self.balance += amount;
276 self.total_earned += amount;
277 self.transactions.push(Transaction {
278 timestamp: chrono::Utc::now().to_rfc3339(),
279 amount,
280 description: description.to_string(),
281 });
282 }
283}
284
/// Boot the Smart Tree daemon: ensure an auth token exists, build the shared
/// state, run an initial context scan, spawn a periodic rescan task, and
/// serve the HTTP/WebSocket API until a graceful-shutdown signal arrives.
pub async fn start_daemon(config: DaemonConfig) -> Result<()> {
    println!(
        r#"
    โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    โ โ
    โ ๐ณ SMART TREE DAEMON - System AI Context Service ๐ณ โ
    โ โ
    โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    "#
    );

    // Ensure a token exists on disk before serving. The value bound here is
    // not used directly below — the auth layer re-reads via load_all_tokens().
    let auth_token = load_or_create_token()?;
    println!(" ๐ Auth token: loaded ({})", token_path().display());

    // One-shot channel consumed by the graceful-shutdown future below.
    let (shutdown_tx, shutdown_rx) = oneshot::channel::<()>();

    let llm_proxy = LlmProxy::default();
    let provider_count = llm_proxy.providers.len();

    // Persistent proxy memory; degrade to an in-memory store if init fails.
    let proxy_memory = ProxyMemory::new().unwrap_or_else(|e| {
        eprintln!("Warning: Could not initialize proxy memory: {}", e);
        eprintln!(" Falling back to in-memory only mode (no persistence)");
        ProxyMemory::in_memory_only()
    });

    let collab_hub = create_hub();

    let sessions = create_session_store();

    // GitHub OAuth is optional; present only when its env vars are set.
    let github_oauth = GitHubOAuthConfig::from_env();
    if github_oauth.is_some() {
        println!(" ๐ GitHub OAuth: configured");
    }

    let hot_watcher = Arc::new(RwLock::new(HotWatcher::new()));
    println!(" ๐ฅ Hot Watcher: ready (MEM8 waves)");

    let state = Arc::new(RwLock::new(DaemonState {
        context: SystemContext::default(),
        credits: CreditTracker::default(),
        config: config.clone(),
        shutdown_tx: Some(shutdown_tx),
        llm_proxy,
        proxy_memory,
        collab_hub,
        sessions,
        github_oauth,
        hot_watcher,
    }));

    println!(" ๐ค LLM Providers: {} available", provider_count);

    // Initial synchronous scan so the /context API is populated at startup.
    {
        let mut s = state.write().await;
        scan_system_context(&mut s.context, &config.watch_paths)?;
    }

    // Background rescan every 5 minutes. try_write() deliberately skips a
    // cycle instead of blocking when the state lock is contended.
    let state_clone = Arc::clone(&state);
    let watch_paths = config.watch_paths.clone();
    tokio::spawn(async move {
        loop {
            tokio::time::sleep(tokio::time::Duration::from_secs(300)).await;
            if let Ok(mut s) = state_clone.try_write() {
                let _ = scan_system_context(&mut s.context, &watch_paths);
            }
        }
    });

    let mcp_context = create_mcp_context();

    // Route table. NOTE(review): the auth layer wraps only the routes added
    // before .layer(); /mcp is nested *after* it, so /mcp/* appears to bypass
    // bearer-token auth — confirm whether that is intentional.
    let app = Router::new()
        .route("/", get(welcome_page))
        .route("/health", get(health))
        .route("/info", get(info))
        .route("/settings", get(get_settings))
        .route("/settings", post(update_settings))
        .route("/context", get(get_context))
        .route("/context/projects", get(get_projects))
        .route("/context/query", post(query_context))
        .route("/context/files", get(list_files))
        .route("/credits", get(get_credits))
        .route("/credits/record", post(record_credit))
        .route("/tools", get(list_tools))
        .route("/tools/call", post(call_tool))
        .route("/v1/chat/completions", post(chat_completions))
        .route("/v1/models", get(list_models))
        .route("/collab/presence", get(collab_presence))
        .route("/collab/ws", get(collab_websocket_handler))
        .route("/ws", get(websocket_handler))
        .route("/shutdown", post(shutdown_handler))
        .route("/ping", get(ping))
        .route("/cli/scan", post(crate::daemon_cli::cli_scan_handler))
        .route("/cli/stream", post(crate::daemon_cli::cli_stream_handler))
        .route("/watch", post(watch_directory))
        .route("/watch", axum::routing::delete(unwatch_directory))
        .route("/watch/status", get(watch_status))
        .route("/watch/hot", get(watch_hot_directories))
        .with_state(state)
        .layer(middleware::from_fn_with_state(load_all_tokens(), auth_middleware))
        .nest_service("/mcp", mcp_router(mcp_context));

    // Bind localhost by default; 0.0.0.0 only when explicitly allowed.
    let bind_addr: [u8; 4] = if config.allow_external {
        [0, 0, 0, 0]
    } else {
        [127, 0, 0, 1]
    };
    let addr = SocketAddr::from((bind_addr, config.port));
    println!("Smart Tree Daemon listening on http://{}", addr);
    if !config.allow_external {
        println!(" ๐ Bound to localhost only (set allow_external=true in ~/.st/config.toml to allow external)");
    }
    println!(" - CLI Scan: /cli/scan (thin-client endpoint!)");
    println!(" - CLI Stream: /cli/stream (SSE streaming)");
    println!(" - MCP HTTP: /mcp/* (The Custodian watching!) ๐งน");
    println!(" - Context API: /context");
    println!(" - Credits: /credits");
    println!(" - Tools: /tools (legacy)");
    println!(" - LLM Proxy: /v1/chat/completions (OpenAI-compatible!)");
    println!(" - Models: /v1/models");
    println!(" - Collab: /collab/ws (Hot Tub Mode!) ๐");
    println!(" - Hot Watcher: /watch (MEM8 real-time intelligence) ๐ฅ");
    println!(" - WebSocket: /ws");
    println!(" - Shutdown: POST /shutdown");

    let listener = tokio::net::TcpListener::bind(addr).await?;

    // Serve until the shutdown oneshot fires (triggered via DaemonState).
    axum::serve(listener, app)
        .with_graceful_shutdown(async {
            shutdown_rx.await.ok();
            println!("\n๐ณ Smart Tree Daemon shutting down gracefully...");
        })
        .await?;

    println!("๐ณ Smart Tree Daemon stopped.");
    Ok(())
}
449
/// GET / — serve the embedded single-page dashboard.
///
/// The entire page (HTML, CSS, and JS) lives in one static raw string:
/// a test-chat panel hitting /v1/chat/completions, a transparency log fed by
/// the /ws WebSocket, an endpoint index, and hard-coded model safety scores.
///
/// NOTE(review): the in-page fetch() to /v1/chat/completions sends no
/// Authorization header, while auth_middleware requires a bearer token on
/// that route — confirm how the dashboard is expected to authenticate.
async fn welcome_page() -> axum::response::Html<&'static str> {
    axum::response::Html(r#"<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Smart Tree Daemon</title>
    <style>
        * { margin: 0; padding: 0; box-sizing: border-box; }
        body {
            font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
            background: linear-gradient(135deg, #1a1a2e 0%, #16213e 100%);
            color: #e0e0e0; min-height: 100vh; padding: 2rem;
        }
        .header { text-align: center; margin-bottom: 2rem; }
        .header h1 { font-size: 2.5rem; }
        .header .emoji { font-size: 3rem; }
        .grid { display: grid; grid-template-columns: 1fr 1fr; gap: 1.5rem; max-width: 1200px; margin: 0 auto; }
        @media (max-width: 800px) { .grid { grid-template-columns: 1fr; } }
        .card {
            background: rgba(255,255,255,0.05); border-radius: 12px;
            padding: 1.5rem; border: 1px solid rgba(255,255,255,0.1);
        }
        .card h2 { font-size: 1.1rem; color: #4ecdc4; margin-bottom: 1rem; display: flex; align-items: center; gap: 0.5rem; }
        .endpoint { display: flex; justify-content: space-between; padding: 0.4rem 0; font-size: 0.9rem; }
        .endpoint a { color: #4ecdc4; text-decoration: none; }
        .endpoint a:hover { text-decoration: underline; }

        /* Chat */
        #chat-messages {
            height: 200px; overflow-y: auto; background: rgba(0,0,0,0.3);
            border-radius: 8px; padding: 1rem; margin-bottom: 1rem; font-size: 0.85rem;
        }
        .msg { margin-bottom: 0.5rem; padding: 0.5rem; border-radius: 6px; }
        .msg.user { background: rgba(78,205,196,0.2); text-align: right; }
        .msg.ai { background: rgba(243,156,18,0.2); }
        .msg .model { font-size: 0.7rem; color: #888; }
        .msg .score { font-size: 0.7rem; padding: 2px 6px; border-radius: 4px; margin-left: 0.5rem; }
        .score.safe { background: #27ae60; color: white; }
        .score.warn { background: #f39c12; color: black; }
        .score.danger { background: #e74c3c; color: white; }
        #chat-input { display: flex; gap: 0.5rem; }
        #chat-input input {
            flex: 1; padding: 0.75rem; border-radius: 8px; border: none;
            background: rgba(255,255,255,0.1); color: white;
        }
        #chat-input select { padding: 0.5rem; border-radius: 8px; background: #2a2a4a; color: white; border: none; }
        #chat-input button {
            padding: 0.75rem 1.5rem; border-radius: 8px; border: none;
            background: #4ecdc4; color: #1a1a2e; font-weight: bold; cursor: pointer;
        }

        /* Transparency Log */
        #transparency-log {
            height: 250px; overflow-y: auto; background: rgba(0,0,0,0.3);
            border-radius: 8px; padding: 0.5rem; font-family: monospace; font-size: 0.75rem;
        }
        .log-entry { padding: 0.4rem; border-bottom: 1px solid rgba(255,255,255,0.05); }
        .log-entry .time { color: #888; }
        .log-entry .type { padding: 2px 6px; border-radius: 3px; font-size: 0.65rem; }
        .log-entry .type.mcp { background: #9b59b6; }
        .log-entry .type.llm { background: #3498db; }
        .log-entry .type.tool { background: #e67e22; }
        .log-entry .content { color: #ccc; margin-top: 0.25rem; word-break: break-all; }

        /* Dashboard link */
        .dashboard-link {
            display: inline-block; margin-top: 1rem; padding: 0.75rem 2rem;
            background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
            color: white; text-decoration: none; border-radius: 8px; font-weight: bold;
        }
        .dashboard-link:hover { opacity: 0.9; }

        .custodian { text-align: center; color: #f39c12; margin-top: 1.5rem; }
    </style>
</head>
<body>
    <div class="header">
        <div class="emoji">๐ณ</div>
        <h1>Smart Tree Daemon</h1>
        <p style="color:#888">System AI Context Service</p>
        <p style="color:#4ecdc4;margin-top:1rem;">You're viewing the Smart Tree Dashboard</p>
        <p style="color:#888;font-size:0.85rem;">Bookmark this page: <strong>http://localhost:28428</strong></p>
    </div>

    <div class="grid">
        <!-- Chat Test -->
        <div class="card">
            <h2>๐ฌ Test Chat (LLM Proxy via OpenRouter)</h2>
            <div id="chat-messages"></div>
            <div id="chat-input">
                <select id="model-select">
                    <optgroup label="Top Coding Models">
                        <option value="anthropic/claude-opus-4.5">Claude Opus 4.5</option>
                        <option value="anthropic/claude-sonnet-4.5">Claude Sonnet 4.5</option>
                        <option value="openai/gpt-5.2-codex">GPT-5.2 Codex</option>
                        <option value="google/gemini-3-pro-preview">Gemini 3 Pro</option>
                        <option value="deepseek/deepseek-v3.2">DeepSeek V3.2</option>
                        <option value="qwen/qwen3-coder">Qwen3 Coder 480B</option>
                    </optgroup>
                    <optgroup label="Fast & Efficient">
                        <option value="anthropic/claude-haiku-4.5">Claude Haiku 4.5</option>
                        <option value="x-ai/grok-code-fast-1">Grok Code Fast</option>
                        <option value="google/gemini-3-flash-preview">Gemini 3 Flash</option>
                        <option value="moonshotai/kimi-k2.5">Kimi K2.5</option>
                    </optgroup>
                    <optgroup label="Free Tier">
                        <option value="google/gemini-2.0-flash-exp:free">Gemini 2.0 Flash (Free)</option>
                        <option value="z-ai/glm-4.5-air:free">GLM 4.5 Air (Free)</option>
                    </optgroup>
                </select>
                <input type="text" id="msg-input" placeholder="Type a message..." onkeypress="if(event.key==='Enter')sendChat()">
                <button onclick="sendChat()">Send</button>
            </div>
            <p style="font-size:0.7rem;color:#666;margin-top:0.5rem;">Uses OpenRouter - add OPENROUTER_API_KEY to config</p>
        </div>

        <!-- Transparency Log -->
        <div class="card">
            <h2>๐๏ธ Transparency Mode</h2>
            <p style="font-size:0.8rem;color:#888;margin-bottom:0.5rem">All AI communications logged here</p>
            <div id="transparency-log">
                <div class="log-entry">
                    <span class="time">--:--:--</span>
                    <span class="type mcp">SYSTEM</span>
                    <div class="content">Transparency mode active. Watching all AI traffic...</div>
                </div>
            </div>
        </div>

        <!-- API Endpoints -->
        <div class="card">
            <h2>๐ API Endpoints</h2>
            <div class="endpoint"><span>Health</span><a href="/health">/health</a></div>
            <div class="endpoint"><span>Info</span><a href="/info">/info</a></div>
            <div class="endpoint"><span>Context</span><a href="/context">/context</a></div>
            <div class="endpoint"><span>MCP Tools</span><a href="/mcp/tools/list">/mcp/tools/list</a></div>
            <div class="endpoint"><span>Models</span><a href="/v1/models">/v1/models</a></div>
            <div class="endpoint"><span>Chat API</span><span>/v1/chat/completions</span></div>
        </div>

        <!-- Model Safety -->
        <div class="card">
            <h2>๐ก๏ธ Model Safety Scores</h2>
            <p style="font-size:0.8rem;color:#888;margin-bottom:1rem">Based on observed behavior</p>
            <div class="endpoint">
                <span>Claude 3.5 Sonnet</span>
                <span class="score safe">10/10</span>
            </div>
            <div class="endpoint">
                <span>GPT-4o</span>
                <span class="score safe">9/10</span>
            </div>
            <div class="endpoint">
                <span>Gemini 2.0</span>
                <span class="score safe">9/10</span>
            </div>
            <div class="endpoint">
                <span style="color:#e74c3c">greatcoderMDK</span>
                <span class="score danger">2/10</span>
            </div>
        </div>

        <!-- Settings -->
        <div class="card" style="grid-column: 1 / -1;">
            <h2>โ๏ธ Configuration</h2>
            <p style="font-size:0.8rem;color:#888;margin-bottom:1rem">
                Edit <code>~/.st/config.toml</code> to add API keys and preferences
            </p>
            <div style="display:flex;gap:1rem;flex-wrap:wrap;">
                <a href="/settings" class="dashboard-link" style="font-size:0.9rem;padding:0.5rem 1rem;">View Config</a>
                <a href="/v1/models" class="dashboard-link" style="font-size:0.9rem;padding:0.5rem 1rem;background:linear-gradient(135deg,#27ae60,#2ecc71);">Available Models</a>
            </div>
            <pre id="config-preview" style="margin-top:1rem;background:rgba(0,0,0,0.3);padding:1rem;border-radius:8px;font-size:0.75rem;max-height:150px;overflow:auto;display:none;"></pre>
        </div>
    </div>

    <p class="custodian">๐งน The Custodian is watching all operations</p>

    <script>
        const messages = document.getElementById('chat-messages');
        const log = document.getElementById('transparency-log');

        function addLog(type, content) {
            const time = new Date().toLocaleTimeString();
            const typeClass = type.toLowerCase().includes('mcp') ? 'mcp' :
                type.toLowerCase().includes('llm') ? 'llm' : 'tool';
            log.innerHTML += `<div class="log-entry">
                <span class="time">${time}</span>
                <span class="type ${typeClass}">${type}</span>
                <div class="content">${content.substring(0, 200)}${content.length > 200 ? '...' : ''}</div>
            </div>`;
            log.scrollTop = log.scrollHeight;
        }

        async function sendChat() {
            const input = document.getElementById('msg-input');
            const model = document.getElementById('model-select').value;
            const msg = input.value.trim();
            if (!msg) return;

            // Add user message
            messages.innerHTML += `<div class="msg user">${msg}</div>`;
            input.value = '';

            // Log the request
            addLog('LLM-REQ', `Model: ${model} | "${msg}"`);

            try {
                const res = await fetch('/v1/chat/completions', {
                    method: 'POST',
                    headers: { 'Content-Type': 'application/json' },
                    body: JSON.stringify({
                        model: model,
                        messages: [{ role: 'user', content: msg }],
                        max_tokens: 500
                    })
                });

                const data = await res.json();
                let reply = data.choices?.[0]?.message?.content;
                if (!reply) {
                    // Handle error objects
                    if (data.error) {
                        reply = typeof data.error === 'object' ?
                            (data.error.message || JSON.stringify(data.error)) : data.error;
                    } else {
                        reply = 'No response';
                    }
                }
                const safeScore = model.includes('claude') ? 'safe' :
                    model.includes('greatcoder') ? 'danger' : 'safe';

                messages.innerHTML += `<div class="msg ai">
                    <span class="model">${model}</span>
                    <span class="score ${safeScore}">${safeScore === 'safe' ? 'โ' : 'โ '}</span>
                    <div>${reply}</div>
                </div>`;

                addLog('LLM-RES', `${model}: ${typeof reply === 'string' ? reply : JSON.stringify(reply)}`);
            } catch (e) {
                messages.innerHTML += `<div class="msg ai" style="color:#e74c3c">Error: ${e.message}</div>`;
                addLog('ERROR', e.message);
            }

            messages.scrollTop = messages.scrollHeight;
        }

        // Connect to WebSocket for live transparency
        try {
            const ws = new WebSocket(`ws://${location.host}/ws`);
            ws.onmessage = (e) => {
                try {
                    const data = JSON.parse(e.data);
                    addLog(data.type || 'EVENT', JSON.stringify(data));
                } catch { addLog('RAW', e.data); }
            };
            ws.onopen = () => addLog('SYSTEM', 'WebSocket connected for live updates');
        } catch (e) { console.log('WS not available'); }
    </script>
</body>
</html>"#)
}
715
/// GET /health — liveness probe; the only route exempt from bearer auth
/// (see `auth_middleware`).
async fn health() -> &'static str {
    "ok"
}
719
/// JSON payload for GET /info.
#[derive(Serialize)]
struct InfoResponse {
    /// Service identifier.
    name: &'static str,
    /// Crate version, baked in at compile time via CARGO_PKG_VERSION.
    version: &'static str,
    /// One-line service description.
    description: &'static str,
}
726
/// GET /info — static service metadata (name, version, description).
async fn info() -> Json<InfoResponse> {
    Json(InfoResponse {
        name: "smart-tree-daemon",
        version: env!("CARGO_PKG_VERSION"),
        description: "System-wide AI context service with Foken credit tracking",
    })
}
734
735async fn get_settings() -> axum::response::Response {
737 use axum::response::IntoResponse;
738 use crate::config::StConfig;
739
740 match StConfig::load() {
741 Ok(config) => {
742 match toml::to_string_pretty(&config) {
744 Ok(toml_str) => {
745 let html = format!(r#"<!DOCTYPE html>
746<html><head><title>Smart Tree Config</title>
747<style>
748body {{ font-family: monospace; background: #1a1a2e; color: #e0e0e0; padding: 2rem; }}
749pre {{ background: rgba(0,0,0,0.3); padding: 1rem; border-radius: 8px; overflow-x: auto; }}
750h1 {{ color: #4ecdc4; }}
751.path {{ color: #888; font-size: 0.9rem; }}
752a {{ color: #4ecdc4; }}
753</style></head><body>
754<h1>โ๏ธ Smart Tree Configuration</h1>
755<p class="path">File: ~/.st/config.toml</p>
756<pre>{}</pre>
757<p><a href="/">โ Back to Dashboard</a></p>
758</body></html>"#, toml_str);
759 axum::response::Html(html).into_response()
760 }
761 Err(e) => (
762 axum::http::StatusCode::INTERNAL_SERVER_ERROR,
763 format!("Failed to serialize config: {}", e)
764 ).into_response()
765 }
766 }
767 Err(e) => (
768 axum::http::StatusCode::INTERNAL_SERVER_ERROR,
769 format!("Failed to load config: {}", e)
770 ).into_response()
771 }
772}
773
774async fn update_settings(
776 axum::extract::Json(updates): axum::extract::Json<serde_json::Value>
777) -> axum::response::Response {
778 use axum::response::IntoResponse;
779 use crate::config::StConfig;
780
781 let mut config = match StConfig::load() {
783 Ok(c) => c,
784 Err(e) => return (
785 axum::http::StatusCode::INTERNAL_SERVER_ERROR,
786 format!("Failed to load config: {}", e)
787 ).into_response()
788 };
789
790 if let Some(api_keys) = updates.get("api_keys").and_then(|v| v.as_object()) {
792 if let Some(key) = api_keys.get("anthropic").and_then(|v| v.as_str()) {
793 config.api_keys.anthropic = Some(key.to_string());
794 }
795 if let Some(key) = api_keys.get("openai").and_then(|v| v.as_str()) {
796 config.api_keys.openai = Some(key.to_string());
797 }
798 if let Some(key) = api_keys.get("google").and_then(|v| v.as_str()) {
799 config.api_keys.google = Some(key.to_string());
800 }
801 }
802
803 match config.save() {
805 Ok(_) => Json(serde_json::json!({"status": "ok", "message": "Config updated"})).into_response(),
806 Err(e) => (
807 axum::http::StatusCode::INTERNAL_SERVER_ERROR,
808 format!("Failed to save config: {}", e)
809 ).into_response()
810 }
811}
812
/// JSON payload for GET /context.
#[derive(Serialize)]
struct ContextResponse {
    /// Number of projects detected by the last scan.
    projects_count: usize,
    /// Number of directories with scan info.
    directories_count: usize,
    /// RFC 3339 timestamp of the last scan, if one has run.
    last_scan: Option<String>,
    /// Current Foken credit balance.
    credits_balance: f64,
}
820
821async fn get_context(State(state): State<Arc<RwLock<DaemonState>>>) -> Json<ContextResponse> {
822 let s = state.read().await;
823 Json(ContextResponse {
824 projects_count: s.context.projects.len(),
825 directories_count: s.context.consciousnesses.len(),
826 last_scan: s
827 .context
828 .last_scan
829 .map(|t| chrono::DateTime::<chrono::Utc>::from(t).to_rfc3339()),
830 credits_balance: s.credits.balance,
831 })
832}
833
834async fn get_projects(State(state): State<Arc<RwLock<DaemonState>>>) -> Json<Vec<ProjectInfo>> {
835 let s = state.read().await;
836 Json(s.context.projects.values().cloned().collect())
837}
838
/// Request body for POST /context/query.
#[derive(Deserialize)]
struct ContextQuery {
    /// Case-insensitive search text matched against project metadata.
    query: String,
}
843
/// Response body for POST /context/query.
#[derive(Serialize)]
struct QueryResult {
    /// Projects matching the query.
    projects: Vec<ProjectInfo>,
    /// Up to 20 "project-path/key-file" strings from the matches.
    files: Vec<String>,
    /// Human-readable summary of the result.
    suggestion: String,
}
850
851async fn query_context(
852 State(state): State<Arc<RwLock<DaemonState>>>,
853 Json(req): Json<ContextQuery>,
854) -> Json<QueryResult> {
855 let s = state.read().await;
856 let query_lower = req.query.to_lowercase();
857
858 let projects: Vec<ProjectInfo> = s
860 .context
861 .projects
862 .values()
863 .filter(|p| {
864 p.name.to_lowercase().contains(&query_lower)
865 || p.essence.to_lowercase().contains(&query_lower)
866 || p.key_files
867 .iter()
868 .any(|f| f.to_lowercase().contains(&query_lower))
869 })
870 .cloned()
871 .collect();
872
873 let files: Vec<String> = projects
875 .iter()
876 .flat_map(|p| p.key_files.iter().map(|f| format!("{}/{}", p.path, f)))
877 .take(20)
878 .collect();
879
880 let suggestion = if projects.is_empty() {
881 format!(
882 "No projects found matching '{}'. Try a different query.",
883 req.query
884 )
885 } else {
886 format!(
887 "Found {} projects. Top match: {}",
888 projects.len(),
889 projects[0].name
890 )
891 };
892
893 Json(QueryResult {
894 projects,
895 files,
896 suggestion,
897 })
898}
899
/// Query-string parameters for GET /context/files.
#[derive(Deserialize)]
struct ListFilesQuery {
    /// Root directory to walk; defaults to ".".
    path: Option<String>,
    /// Substring filter applied to each full path.
    pattern: Option<String>,
    /// Maximum walk depth; defaults to 3.
    depth: Option<usize>,
}
906
907async fn list_files(Query(params): Query<ListFilesQuery>) -> Json<Vec<String>> {
908 use walkdir::WalkDir;
909
910 let path = params.path.unwrap_or_else(|| ".".to_string());
911 let depth = params.depth.unwrap_or(3);
912
913 let files: Vec<String> = WalkDir::new(&path)
914 .max_depth(depth)
915 .into_iter()
916 .filter_map(|e| e.ok())
917 .filter(|e| e.path().is_file())
918 .filter(|e| {
919 if let Some(ref pat) = params.pattern {
920 e.path().to_string_lossy().contains(pat)
921 } else {
922 true
923 }
924 })
925 .take(100)
926 .map(|e| e.path().to_string_lossy().to_string())
927 .collect();
928
929 Json(files)
930}
931
/// JSON payload for the /credits endpoints.
#[derive(Serialize)]
struct CreditsResponse {
    /// Current spendable balance.
    balance: f64,
    /// Lifetime credits earned.
    total_earned: f64,
    /// Lifetime credits spent.
    total_spent: f64,
    /// Up to the 10 most recent transactions, newest first.
    recent_transactions: Vec<Transaction>,
}
939
940async fn get_credits(State(state): State<Arc<RwLock<DaemonState>>>) -> Json<CreditsResponse> {
941 let s = state.read().await;
942 Json(CreditsResponse {
943 balance: s.credits.balance,
944 total_earned: s.credits.total_earned,
945 total_spent: s.credits.total_spent,
946 recent_transactions: s
947 .credits
948 .transactions
949 .iter()
950 .rev()
951 .take(10)
952 .cloned()
953 .collect(),
954 })
955}
956
/// Request body for POST /credits/record.
#[derive(Deserialize)]
struct RecordCreditRequest {
    /// Number of tokens saved; credited 1:1 to the balance.
    tokens_saved: u64,
    /// Reason recorded in the transaction log.
    description: String,
}
962
963async fn record_credit(
964 State(state): State<Arc<RwLock<DaemonState>>>,
965 Json(req): Json<RecordCreditRequest>,
966) -> Json<CreditsResponse> {
967 let mut s = state.write().await;
968 s.credits.record_savings(req.tokens_saved, &req.description);
969
970 Json(CreditsResponse {
971 balance: s.credits.balance,
972 total_earned: s.credits.total_earned,
973 total_spent: s.credits.total_spent,
974 recent_transactions: s
975 .credits
976 .transactions
977 .iter()
978 .rev()
979 .take(10)
980 .cloned()
981 .collect(),
982 })
983}
984
/// One entry in the legacy GET /tools listing.
#[derive(Serialize)]
struct Tool {
    /// Tool identifier, dispatched on by POST /tools/call.
    name: String,
    /// Human-readable description.
    description: String,
}
990
991async fn list_tools() -> Json<Vec<Tool>> {
992 Json(vec![
993 Tool {
994 name: "get_context".to_string(),
995 description: "Get system context summary".to_string(),
996 },
997 Tool {
998 name: "list_projects".to_string(),
999 description: "List all detected projects".to_string(),
1000 },
1001 Tool {
1002 name: "query_context".to_string(),
1003 description: "Search context by keyword".to_string(),
1004 },
1005 Tool {
1006 name: "list_files".to_string(),
1007 description: "List files in a directory".to_string(),
1008 },
1009 Tool {
1010 name: "get_credits".to_string(),
1011 description: "Get Foken credit balance".to_string(),
1012 },
1013 Tool {
1014 name: "record_savings".to_string(),
1015 description: "Record token compression savings".to_string(),
1016 },
1017 ])
1018}
1019
/// Request body for POST /tools/call.
#[derive(Deserialize)]
struct ToolCall {
    /// Tool name, matched against the catalog from `list_tools`.
    name: String,
    /// Free-form JSON arguments; each tool reads the keys it needs.
    arguments: serde_json::Value,
}
1025
1026async fn call_tool(
1027 State(state): State<Arc<RwLock<DaemonState>>>,
1028 Json(call): Json<ToolCall>,
1029) -> impl IntoResponse {
1030 match call.name.as_str() {
1031 "get_context" => {
1032 let s = state.read().await;
1033 (
1034 StatusCode::OK,
1035 Json(serde_json::json!({
1036 "projects": s.context.projects.len(),
1037 "directories": s.context.consciousnesses.len(),
1038 "credits": s.credits.balance
1039 })),
1040 )
1041 }
1042 "list_projects" => {
1043 let s = state.read().await;
1044 let projects: Vec<_> = s.context.projects.values().cloned().collect();
1045 (
1046 StatusCode::OK,
1047 Json(serde_json::json!({ "projects": projects })),
1048 )
1049 }
1050 "list_files" => {
1051 let path = call
1052 .arguments
1053 .get("path")
1054 .and_then(|v| v.as_str())
1055 .unwrap_or(".");
1056 let depth = call
1057 .arguments
1058 .get("depth")
1059 .and_then(|v| v.as_u64())
1060 .unwrap_or(3) as usize;
1061
1062 use walkdir::WalkDir;
1063 let files: Vec<String> = WalkDir::new(path)
1064 .max_depth(depth)
1065 .into_iter()
1066 .filter_map(|e| e.ok())
1067 .filter(|e| e.path().is_file())
1068 .take(100)
1069 .map(|e| e.path().to_string_lossy().to_string())
1070 .collect();
1071
1072 (StatusCode::OK, Json(serde_json::json!({ "files": files })))
1073 }
1074 _ => (
1075 StatusCode::NOT_FOUND,
1076 Json(serde_json::json!({
1077 "error": format!("Unknown tool: {}", call.name)
1078 })),
1079 ),
1080 }
1081}
1082
/// GET /ws — WebSocket endpoint for live dashboard updates.
///
/// NOTE(review): currently a stub — the connection is upgraded and then the
/// future completes immediately, so the socket closes without sending or
/// receiving anything. The dashboard JS connects here expecting transparency
/// events; confirm whether the event stream is implemented elsewhere or TODO.
async fn websocket_handler(
    ws: WebSocketUpgrade,
    State(_state): State<Arc<RwLock<DaemonState>>>,
) -> impl IntoResponse {
    ws.on_upgrade(|_socket| async {
    })
}
1092
1093async fn collab_presence(
1097 State(state): State<Arc<RwLock<DaemonState>>>,
1098) -> Json<serde_json::Value> {
1099 let s = state.read().await;
1100 let hub = s.collab_hub.read().await;
1101 let presence = hub.get_presence();
1102 let hot_tub_count = presence.iter().filter(|p| p.in_hot_tub).count();
1103
1104 Json(serde_json::json!({
1105 "participants": presence,
1106 "total": presence.len(),
1107 "hot_tub_count": hot_tub_count,
1108 "hot_tub_open": hub.is_hot_tub_open()
1109 }))
1110}
1111
1112async fn collab_websocket_handler(
1114 ws: WebSocketUpgrade,
1115 State(state): State<Arc<RwLock<DaemonState>>>,
1116) -> impl IntoResponse {
1117 let hub = state.read().await.collab_hub.clone();
1118 ws.on_upgrade(move |socket| handle_collab_connection(socket, hub))
1119}
1120
/// Drives one collaboration WebSocket session from join to leave.
///
/// Protocol: the client must first send a JSON text message with
/// `{"action":"join","name":...,"participant_type":...}`; until it does,
/// other messages are ignored. After joining, two tasks run
/// concurrently — one forwards hub broadcasts to the socket, the other
/// applies client commands (chat / hot_tub / status) to the hub. When
/// either task finishes, the other is aborted and the participant is
/// removed from the hub.
async fn handle_collab_connection(
    socket: axum::extract::ws::WebSocket,
    hub: SharedCollabHub,
) {
    use axum::extract::ws::Message;
    use futures::{SinkExt, StreamExt};
    use crate::collaboration::{Participant, ParticipantType};

    // Split so the send half can move into one task and the receive
    // half into the other.
    let (mut sender, mut receiver) = socket.split();

    // Phase 1: wait for a valid join message (or a disconnect).
    let participant_id = loop {
        match receiver.next().await {
            Some(Ok(Message::Text(text))) => {
                #[derive(serde::Deserialize)]
                struct JoinMsg {
                    action: String,
                    name: String,
                    participant_type: Option<String>,
                }

                if let Ok(join) = serde_json::from_str::<JoinMsg>(&text) {
                    if join.action == "join" {
                        // Missing or unrecognized types fall back to Unknown.
                        let ptype = join.participant_type
                            .map(|s| match s.to_lowercase().as_str() {
                                "human" | "user" => ParticipantType::Human,
                                "claude" => ParticipantType::Claude,
                                "omni" => ParticipantType::Omni,
                                "grok" => ParticipantType::Grok,
                                _ => ParticipantType::Unknown,
                            })
                            .unwrap_or(ParticipantType::Unknown);

                        let participant = Participant::new(join.name.clone(), ptype);
                        let id = hub.write().await.join(participant);

                        // Acknowledge the join; a failed send is ignored here
                        // since a dead socket will surface in the tasks below.
                        let welcome = serde_json::json!({
                            "type": "welcome",
                            "participant_id": id,
                            "name": join.name
                        });
                        let _ = sender.send(Message::Text(welcome.to_string())).await;
                        break id;
                    }
                }
            }
            // Client closed (or stream ended) before joining: nothing to clean up.
            Some(Ok(Message::Close(_))) | None => return,
            _ => continue,
        }
    };

    // Subscribe to hub broadcast events for this session.
    let mut broadcast_rx = hub.read().await.subscribe();

    let _hub_for_send = hub.clone();
    let pid_for_send = participant_id.clone();
    // Task: hub broadcasts -> client socket. Stops when the broadcast
    // channel closes or the socket send fails.
    let mut send_task = tokio::spawn(async move {
        while let Ok(msg) = broadcast_rx.recv().await {
            let json = serde_json::to_string(&msg).unwrap_or_default();
            if sender.send(Message::Text(json)).await.is_err() {
                break;
            }
        }
        pid_for_send
    });

    let hub_for_recv = hub.clone();
    let pid_for_recv = participant_id.clone();
    // Task: client socket -> hub commands.
    let mut recv_task = tokio::spawn(async move {
        while let Some(Ok(msg)) = receiver.next().await {
            if let Message::Text(text) = msg {
                // Internally-tagged enum: the "action" field selects the variant.
                #[derive(serde::Deserialize)]
                #[serde(tag = "action")]
                enum ClientMsg {
                    #[serde(rename = "chat")]
                    Chat { message: String },
                    #[serde(rename = "hot_tub")]
                    HotTub,
                    #[serde(rename = "status")]
                    Status { status: Option<String>, working_on: Option<String> },
                }

                // Unparseable messages are silently ignored.
                if let Ok(client_msg) = serde_json::from_str::<ClientMsg>(&text) {
                    match client_msg {
                        ClientMsg::Chat { message } => {
                            hub_for_recv.read().await.chat(&pid_for_recv, message);
                        }
                        ClientMsg::HotTub => {
                            hub_for_recv.write().await.toggle_hot_tub(&pid_for_recv);
                        }
                        ClientMsg::Status { status, working_on } => {
                            hub_for_recv.write().await.update_status(&pid_for_recv, status, working_on);
                        }
                    }
                }
            }
        }
        pid_for_recv
    });

    // Whichever direction finishes first, cancel the other.
    tokio::select! {
        _ = &mut send_task => recv_task.abort(),
        _ = &mut recv_task => send_task.abort(),
    }

    // Cleanup: remove this participant from the hub.
    hub.write().await.leave(&participant_id);
}
1234
/// Liveness probe; always answers with the literal string "pong".
async fn ping() -> &'static str {
    "pong"
}
1239
1240async fn shutdown_handler(State(state): State<Arc<RwLock<DaemonState>>>) -> impl IntoResponse {
1242 let mut s = state.write().await;
1244 if let Some(tx) = s.shutdown_tx.take() {
1245 let _ = tx.send(());
1247 (
1248 StatusCode::OK,
1249 Json(serde_json::json!({
1250 "status": "shutting_down",
1251 "message": "Smart Tree Daemon is shutting down gracefully"
1252 })),
1253 )
1254 } else {
1255 (
1256 StatusCode::CONFLICT,
1257 Json(serde_json::json!({
1258 "status": "error",
1259 "message": "Shutdown already in progress"
1260 })),
1261 )
1262 }
1263}
1264
1265fn scan_system_context(context: &mut SystemContext, watch_paths: &[PathBuf]) -> Result<()> {
1267 use walkdir::WalkDir;
1268
1269 for path in watch_paths {
1270 if !path.exists() {
1271 continue;
1272 }
1273
1274 for entry in WalkDir::new(path)
1275 .max_depth(3)
1276 .follow_links(false)
1277 .into_iter()
1278 .filter_map(|e| e.ok())
1279 {
1280 let entry_path = entry.path();
1281
1282 if entry_path
1284 .file_name()
1285 .map(|n| n.to_string_lossy().starts_with('.'))
1286 .unwrap_or(false)
1287 {
1288 continue;
1289 }
1290
1291 if entry_path.is_dir() {
1292 if let Some(project) = detect_project(entry_path) {
1294 context.projects.insert(entry_path.to_path_buf(), project);
1295 }
1296
1297 if let Some(info) = create_directory_info(entry_path) {
1299 context
1300 .consciousnesses
1301 .insert(entry_path.to_path_buf(), info);
1302 }
1303 }
1304 }
1305 }
1306
1307 context.last_scan = Some(std::time::SystemTime::now());
1308 Ok(())
1309}
1310
1311fn detect_project(path: &std::path::Path) -> Option<ProjectInfo> {
1312 let markers = [
1313 ("Cargo.toml", "Rust"),
1314 ("package.json", "JavaScript"),
1315 ("pyproject.toml", "Python"),
1316 ("go.mod", "Go"),
1317 ];
1318
1319 for (marker, project_type) in markers {
1320 if path.join(marker).exists() {
1321 let name = path.file_name()?.to_string_lossy().to_string();
1322
1323 let key_files: Vec<String> = ["README.md", "CLAUDE.md", "src/main.rs", "src/lib.rs"]
1324 .iter()
1325 .filter(|f| path.join(f).exists())
1326 .map(|f| f.to_string())
1327 .collect();
1328
1329 let essence = read_essence(path).unwrap_or_else(|| format!("{} project", project_type));
1330
1331 return Some(ProjectInfo {
1332 path: path.to_string_lossy().to_string(),
1333 name,
1334 project_type: project_type.to_string(),
1335 key_files,
1336 essence,
1337 });
1338 }
1339 }
1340 None
1341}
1342
/// Extracts a one-line "essence" for a project directory.
///
/// Checks `CLAUDE.md` first, then `README.md`, and returns the first
/// non-empty line that is neither a heading (starts with `#`) nor a
/// code-fence marker, truncated to 100 characters. Returns `None` when
/// neither file is readable or no suitable line is found.
fn read_essence(path: &std::path::Path) -> Option<String> {
    for readme in ["CLAUDE.md", "README.md"] {
        // Read directly instead of exists()-then-read: a missing file
        // simply yields Err, and this avoids the check/read race (TOCTOU).
        if let Ok(content) = std::fs::read_to_string(path.join(readme)) {
            for line in content.lines() {
                let line = line.trim();
                if !line.is_empty() && !line.starts_with('#') && !line.starts_with("```") {
                    return Some(line.chars().take(100).collect());
                }
            }
        }
    }
    None
}
1359
1360fn create_directory_info(path: &std::path::Path) -> Option<DirectoryInfo> {
1361 use std::collections::HashSet;
1362 use walkdir::WalkDir;
1363
1364 let mut file_count = 0;
1365 let mut extensions: HashSet<String> = HashSet::new();
1366
1367 for entry in WalkDir::new(path)
1368 .max_depth(1)
1369 .into_iter()
1370 .filter_map(|e| e.ok())
1371 {
1372 if entry.path().is_file() {
1373 file_count += 1;
1374 if let Some(ext) = entry.path().extension() {
1375 extensions.insert(ext.to_string_lossy().to_string());
1376 }
1377 }
1378 }
1379
1380 use std::hash::{Hash, Hasher};
1382 let mut hasher = std::collections::hash_map::DefaultHasher::new();
1383 path.hash(&mut hasher);
1384 let hash = hasher.finish();
1385 let frequency = 20.0 + (hash % 18000) as f64 / 100.0;
1386
1387 Some(DirectoryInfo {
1388 path: path.to_string_lossy().to_string(),
1389 frequency,
1390 file_count,
1391 patterns: extensions.into_iter().collect(),
1392 })
1393}
1394
/// OpenAI-compatible `POST /v1/chat/completions` handler.
///
/// Flow: resolve a provider from the model name, splice stored
/// per-scope conversation history into the message list, call the LLM
/// proxy, persist the new user/assistant turns back into proxy memory,
/// record credit savings, and answer with an OpenAI-shaped response or
/// error body.
///
/// NOTE(review): `req.stream` is propagated into the internal request,
/// but this handler always returns a single non-streaming JSON
/// response — confirm whether streaming is implemented elsewhere.
async fn chat_completions(
    State(state): State<Arc<RwLock<DaemonState>>>,
    Json(req): Json<OpenAiRequest>,
) -> impl IntoResponse {
    // "provider/model" selects a provider explicitly; otherwise infer
    // one from well-known model-name prefixes (default: openrouter).
    let (provider_name, model_name) = if let Some((p, m)) = req.model.split_once('/') {
        (p.to_string(), m.to_string())
    } else {
        let model_lower = req.model.to_lowercase();
        let provider = if model_lower.starts_with("claude") {
            "openrouter" } else if model_lower.starts_with("gpt") || model_lower.starts_with("o1") || model_lower.starts_with("o3") {
            "openai"
        } else if model_lower.starts_with("gemini") {
            "google"
        } else if model_lower.starts_with("grok") {
            "grok"
        } else if model_lower.contains("llama") || model_lower.contains("mistral") || model_lower.contains("mixtral") {
            "openrouter" } else {
            "openrouter" };
        (provider.to_string(), req.model.clone())
    };

    let internal_req = LlmRequest {
        model: model_name,
        messages: req.messages.into_iter().map(Into::into).collect(),
        temperature: req.temperature,
        max_tokens: req.max_tokens,
        stream: req.stream.unwrap_or(false),
    };

    // The OpenAI `user` field doubles as the conversation-memory scope id.
    let scope_id = req.user.clone().unwrap_or_else(|| "global".to_string());

    // Assemble the outbound request: system prompt first, then stored
    // history for this scope, then the new non-system messages.
    let request_with_history = {
        let state_lock = state.read().await;

        let mut messages_with_history = Vec::new();

        // Keep the first system message from the incoming request, if any.
        if let Some(system_msg) = internal_req
            .messages
            .iter()
            .find(|m| m.role == LlmRole::System)
            .cloned()
        {
            messages_with_history.push(system_msg);
        }

        // Replay prior turns remembered for this scope (system lines excluded).
        if let Some(scope) = state_lock.proxy_memory.get_scope(&scope_id) {
            for msg in &scope.messages {
                if msg.role != LlmRole::System {
                    messages_with_history.push(msg.clone());
                }
            }
        }

        // Append the new request's own non-system messages.
        for msg in &internal_req.messages {
            if msg.role != LlmRole::System {
                messages_with_history.push(msg.clone());
            }
        }

        LlmRequest {
            messages: messages_with_history,
            ..internal_req.clone()
        }
    };

    // Call the provider while holding only a read lock; the block scope
    // drops it before the write lock taken below.
    let llm_result = {
        let state_lock = state.read().await;
        state_lock
            .llm_proxy
            .complete(&provider_name, request_with_history)
            .await
    };

    match llm_result {
        Ok(resp) => {
            let mut state_lock = state.write().await;

            // Persist the latest user turn plus the assistant reply.
            let mut new_history = Vec::new();
            if let Some(last_user_msg) = internal_req
                .messages
                .iter()
                .rev()
                .find(|m| m.role == LlmRole::User)
            {
                new_history.push(last_user_msg.clone());
            }
            new_history.push(LlmMessage {
                role: LlmRole::Assistant,
                content: resp.content.clone(),
            });
            let _ = state_lock.proxy_memory.update_scope(&scope_id, new_history);

            // Record savings as total_tokens / 10 when usage was reported.
            let tokens_used = resp.usage.as_ref().map(|u| u.total_tokens).unwrap_or(0);
            if tokens_used > 0 {
                state_lock.credits.record_savings(
                    tokens_used as u64 / 10, &format!("LLM call to {} ({})", provider_name, req.model),
                );
            }

            (
                StatusCode::OK,
                Json(OpenAiResponse {
                    id: format!("st-{}", uuid::Uuid::new_v4()),
                    object: "chat.completion".to_string(),
                    created: chrono::Utc::now().timestamp() as u64,
                    model: req.model,
                    choices: vec![OpenAiChoice {
                        index: 0,
                        message: OpenAiResponseMessage {
                            role: "assistant".to_string(),
                            content: resp.content,
                        },
                        finish_reason: "stop".to_string(),
                    }],
                    usage: resp.usage.map(|u| OpenAiUsage {
                        prompt_tokens: u.prompt_tokens,
                        completion_tokens: u.completion_tokens,
                        total_tokens: u.total_tokens,
                    }),
                }),
            )
            .into_response()
        }
        Err(e) => {
            // Map provider errors to HTTP statuses by substring matching
            // on the error text (best-effort heuristic).
            let error_msg = format!("{}", e);
            let status = if error_msg.contains("not found") || error_msg.contains("invalid") {
                StatusCode::BAD_REQUEST
            } else if error_msg.contains("unauthorized") || error_msg.contains("authentication") {
                StatusCode::UNAUTHORIZED
            } else {
                StatusCode::INTERNAL_SERVER_ERROR
            };

            (
                status,
                Json(OpenAiErrorResponse {
                    error: OpenAiError {
                        message: error_msg,
                        error_type: "api_error".to_string(),
                        code: None,
                    },
                }),
            )
            .into_response()
        }
    }
}
1564
1565async fn list_models(State(state): State<Arc<RwLock<DaemonState>>>) -> Json<serde_json::Value> {
1567 let state_lock = state.read().await;
1568
1569 let models: Vec<serde_json::Value> = state_lock
1570 .llm_proxy
1571 .providers
1572 .iter()
1573 .map(|p| {
1574 serde_json::json!({
1575 "id": format!("{}/default", p.name().to_lowercase()),
1576 "object": "model",
1577 "owned_by": p.name(),
1578 })
1579 })
1580 .collect();
1581
1582 Json(serde_json::json!({
1583 "object": "list",
1584 "data": models
1585 }))
1586}
1587
/// JSON body accepted by the watch / unwatch endpoints.
#[derive(Deserialize)]
struct WatchRequest {
    // Filesystem path of the directory to operate on.
    path: String,
}
1597
/// JSON reply returned by the watch / unwatch endpoints on success
/// (failures are reported via an HTTP error status instead).
#[derive(Serialize)]
struct WatchResponse {
    // Whether the operation succeeded.
    success: bool,
    // Echo of the requested path.
    path: String,
    // Human-readable outcome description.
    message: String,
}
1605
1606async fn watch_directory(
1608 State(state): State<Arc<RwLock<DaemonState>>>,
1609 Json(req): Json<WatchRequest>,
1610) -> Result<Json<WatchResponse>, (StatusCode, String)> {
1611 let path = std::path::PathBuf::from(&req.path);
1612
1613 if !path.exists() {
1614 return Err((
1615 StatusCode::NOT_FOUND,
1616 format!("Path does not exist: {}", req.path),
1617 ));
1618 }
1619
1620 let state_lock = state.read().await;
1621 let mut watcher = state_lock.hot_watcher.write().await;
1622
1623 match watcher.watch(&path) {
1624 Ok(()) => Ok(Json(WatchResponse {
1625 success: true,
1626 path: req.path,
1627 message: "Now watching directory with MEM8 waves".to_string(),
1628 })),
1629 Err(e) => Err((
1630 StatusCode::INTERNAL_SERVER_ERROR,
1631 format!("Failed to watch: {}", e),
1632 )),
1633 }
1634}
1635
1636async fn unwatch_directory(
1638 State(state): State<Arc<RwLock<DaemonState>>>,
1639 Json(req): Json<WatchRequest>,
1640) -> Result<Json<WatchResponse>, (StatusCode, String)> {
1641 let path = std::path::PathBuf::from(&req.path);
1642
1643 let state_lock = state.read().await;
1644 let mut watcher = state_lock.hot_watcher.write().await;
1645
1646 match watcher.unwatch(&path) {
1647 Ok(()) => Ok(Json(WatchResponse {
1648 success: true,
1649 path: req.path,
1650 message: "Stopped watching directory".to_string(),
1651 })),
1652 Err(e) => Err((
1653 StatusCode::INTERNAL_SERVER_ERROR,
1654 format!("Failed to unwatch: {}", e),
1655 )),
1656 }
1657}
1658
/// Aggregate watcher statistics returned by the watch-status endpoint,
/// mirroring the fields of the watcher's summary.
#[derive(Serialize)]
struct WatchStatusResponse {
    // Total number of watched directories.
    total_watched: usize,
    // Per-band directory counts as reported by the watcher summary.
    critical: usize,
    hot: usize,
    warm: usize,
    cold: usize,
    // Mean arousal value across watched directories.
    average_arousal: f64,
}
1669
1670async fn watch_status(
1672 State(state): State<Arc<RwLock<DaemonState>>>,
1673) -> Json<WatchStatusResponse> {
1674 let state_lock = state.read().await;
1675 let watcher = state_lock.hot_watcher.read().await;
1676 let summary = watcher.summary();
1677
1678 Json(WatchStatusResponse {
1679 total_watched: summary.total_watched,
1680 critical: summary.critical,
1681 hot: summary.hot,
1682 warm: summary.warm,
1683 cold: summary.cold,
1684 average_arousal: summary.average_arousal,
1685 })
1686}
1687
/// Per-directory snapshot returned by the hot-directories endpoint.
#[derive(Serialize)]
struct WatchedDirectoryResponse {
    // Directory path in display form.
    path: String,
    // Wave arousal value.
    arousal: f64,
    // Wave emotional valence.
    valence: f64,
    // Wave frequency.
    frequency: f64,
    // Debug-formatted interest-level enum variant.
    interest_level: String,
    // Number of recorded security findings for this directory.
    security_findings: usize,
    // Whether the watcher currently flags this directory as hot.
    is_hot: bool,
}
1699
1700async fn watch_hot_directories(
1702 State(state): State<Arc<RwLock<DaemonState>>>,
1703) -> Json<Vec<WatchedDirectoryResponse>> {
1704 let state_lock = state.read().await;
1705 let watcher = state_lock.hot_watcher.read().await;
1706 let hot_dirs = watcher.get_hot_directories();
1707
1708 let response: Vec<WatchedDirectoryResponse> = hot_dirs
1709 .into_iter()
1710 .map(|d| WatchedDirectoryResponse {
1711 path: d.path.display().to_string(),
1712 arousal: d.wave.arousal,
1713 valence: d.wave.emotional_valence,
1714 frequency: d.wave.frequency,
1715 interest_level: format!("{:?}", d.interest_level),
1716 security_findings: d.security_findings.len(),
1717 is_hot: d.is_hot(),
1718 })
1719 .collect();
1720
1721 Json(response)
1722}