// offline_intelligence/api/title_api.rs
1// Title generation API: summarize first prompt into 1-5 word chat title via model inference.
2// Offline mode → local LLM worker.
3// Online mode  → OpenRouter proxy (frontend passes api_key + model_id; backend proxies the call
4//                so the WebView never connects to external URLs — Tauri CSP compliance).
5use axum::{
6    extract::{State, Json},
7    http::StatusCode,
8};
9use serde::{Deserialize, Serialize};
10use tracing::{info, warn};
11
12use crate::shared_state::UnifiedAppState;
13
/// Request payload for the title-generation endpoint.
#[derive(Debug, Deserialize)]
pub struct GenerateTitleRequest {
    /// The user's first prompt, to be summarized into a short chat title.
    pub prompt: String,
    /// Upper bound on generated tokens (defaults to 20; clamped to 20 again in the handler).
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
    /// Online mode: OpenRouter API key forwarded from the frontend.
    #[serde(default)]
    pub api_key: Option<String>,
    /// Online mode: OpenRouter model ID (e.g. "anthropic/claude-3-haiku").
    #[serde(default)]
    pub model_id: Option<String>,
}
26
/// Serde default for [`GenerateTitleRequest::max_tokens`]: 20 tokens is
/// ample headroom for a 1-5 word title.
fn default_max_tokens() -> u32 { 20 }
30
/// Success payload: the generated chat title.
#[derive(Debug, Serialize)]
pub struct GenerateTitleResponse {
    /// The generated title (OpenRouter path trims whitespace; local path
    /// returns the worker's output as-is).
    pub title: String,
}
35
/// Error payload returned alongside a non-2xx status code.
#[derive(Debug, Serialize)]
pub struct ErrorResponse {
    /// Human-readable description of the failure.
    pub error: String,
}
40
41/// Generate a concise chat title (1-5 words) from a user prompt.
42///
43/// - **Offline mode** (no `api_key`/`model_id`): uses the local LLM worker directly.
44/// - **Online mode** (`api_key` + `model_id` present): proxies to OpenRouter on behalf of the
45///   frontend. The frontend must NOT call OpenRouter directly — Tauri's CSP blocks external
46///   `connect-src` origins from the WebView.
47pub async fn generate_title(
48    State(state): State<UnifiedAppState>,
49    Json(req): Json<GenerateTitleRequest>,
50) -> Result<Json<GenerateTitleResponse>, (StatusCode, Json<ErrorResponse>)> {
51    info!("Generating title for prompt ({} chars)", req.prompt.len());
52
53    if req.prompt.is_empty() {
54        return Err((
55            StatusCode::BAD_REQUEST,
56            Json(ErrorResponse {
57                error: "Prompt cannot be empty".to_string(),
58            }),
59        ));
60    }
61
62    // ── Online path: proxy to OpenRouter ─────────────────────────────────────
63    if let (Some(api_key), Some(model_id)) = (&req.api_key, &req.model_id) {
64        if !api_key.is_empty() && !model_id.is_empty() {
65            let model_id = model_id.replace("openrouter:", "").replace("openrouter/", "");
66            let prompt_snippet = req.prompt.chars().take(200).collect::<String>();
67            let user_content = format!(
68                "Create a short chat title (1-5 words) that captures the topic of this message. \
69                 Reply with ONLY the title, no quotes or punctuation:\n\"{prompt_snippet}\""
70            );
71
72            let client = reqwest::Client::new();
73            let body = serde_json::json!({
74                "model": model_id,
75                "messages": [{ "role": "user", "content": user_content }],
76                "max_tokens": req.max_tokens.min(20),
77                "temperature": 0.3,
78            });
79
80            match client
81                .post("https://openrouter.ai/api/v1/chat/completions")
82                .header("Authorization", format!("Bearer {api_key}"))
83                .header("Content-Type", "application/json")
84                .json(&body)
85                .send()
86                .await
87            {
88                Ok(res) if res.status().is_success() => {
89                    if let Ok(data) = res.json::<serde_json::Value>().await {
90                        if let Some(title) = data["choices"][0]["message"]["content"]
91                            .as_str()
92                            .map(str::trim)
93                            .filter(|s| !s.is_empty())
94                        {
95                            info!("OpenRouter title: '{title}'");
96                            return Ok(Json(GenerateTitleResponse {
97                                title: title.to_string(),
98                            }));
99                        }
100                    }
101                }
102                Ok(res) => {
103                    warn!("OpenRouter title request returned HTTP {}", res.status());
104                }
105                Err(e) => {
106                    warn!("OpenRouter title request failed: {e}");
107                }
108            }
109
110            // Fall through to local LLM if OpenRouter fails.
111        }
112    }
113
114    // ── Offline path: local LLM worker ───────────────────────────────────────
115    let title_instruction = format!(
116        "User prompt: {}\n\n\
117         Create a short, meaningful chat title using 1-5 words maximum that captures the essence of this prompt.",
118        req.prompt
119    );
120
121    let llm_worker = state.llm_worker.clone();
122    match llm_worker.generate_title(&title_instruction, req.max_tokens.min(20)).await {
123        Ok(title) => {
124            info!("Local LLM title: '{title}'");
125            Ok(Json(GenerateTitleResponse { title }))
126        }
127        Err(e) => {
128            info!("Title generation failed: {e}");
129            Err((
130                StatusCode::INTERNAL_SERVER_ERROR,
131                Json(ErrorResponse {
132                    error: format!("Title generation failed: {e}"),
133                }),
134            ))
135        }
136    }
137}