1pub mod deep;
8pub mod engine;
9pub mod ollama;
10pub mod sidecar;
11
12pub use engine::{classify_by_rules, Classification, ClassificationEngine, ClassificationSource};
13use serde::{Deserialize, Serialize};
14
/// Errors produced while communicating with an LLM backend.
///
/// Converts transparently into a `String` (see the `From` impl below),
/// so commands returning `Result<T, String>` can use `?` on these.
#[derive(Debug, thiserror::Error)]
pub enum LlmError {
    /// Transport-level failure from `reqwest` (connect, timeout, TLS, ...).
    #[error("HTTP request failed: {0}")]
    Http(#[from] reqwest::Error),
    /// The backend process/service could not be reached or is not running.
    #[error("LLM backend unavailable: {0}")]
    Unavailable(String),
    /// The backend responded, but its payload could not be parsed.
    #[error("Parse error: {0}")]
    Parse(String),
    /// The requested model is not installed/known to the backend.
    #[error("Model not found: {0}")]
    ModelNotFound(String),
}
27
28impl From<LlmError> for String {
29 fn from(e: LlmError) -> Self {
30 e.to_string()
31 }
32}
33
34#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
36pub enum LlmBackend {
37 Ollama,
39 Disabled,
41}
42
/// A single text-generation request sent to the active backend.
///
/// `PartialEq` is derived (not `Eq`, because `temperature` is an `f64`)
/// so requests can be compared in tests and deduplication logic.
#[derive(Debug, Clone, PartialEq)]
pub struct GenerateRequest {
    /// The full prompt text to send to the model.
    pub prompt: String,
    /// Upper bound on the number of tokens the model may generate.
    pub max_tokens: u32,
    /// Sampling temperature; higher values produce more varied output.
    pub temperature: f64,
}
50
/// The backend's reply to a [`GenerateRequest`].
///
/// All fields are exactly comparable, so `PartialEq`/`Eq` are derived
/// to allow direct assertions in tests.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct GenerateResponse {
    /// The generated completion text.
    pub text: String,
    /// Number of tokens the backend reports consuming for this call.
    pub tokens_used: u32,
}
57
58#[derive(Debug, Clone, Serialize)]
60pub struct LlmStatus {
61 pub available: bool,
62 pub backend: String,
63 pub model: Option<String>,
64 pub error: Option<String>,
65}