//! llm_link/settings.rs — configuration types for the server, the LLM
//! backend selection, the exposed API surfaces, and client adapters.

use serde::{Deserialize, Serialize};
2
/// Top-level application configuration, assembled programmatically
/// (see the `impl Settings` note at the bottom of this file).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Settings {
    /// HTTP server bind address, port, and log level.
    pub server: ServerSettings,
    /// Which upstream LLM provider to use, with its credentials/model.
    pub llm_backend: LlmBackendSettings,
    /// Per-protocol API surface configuration (Ollama / OpenAI / Anthropic).
    pub apis: ApiSettings,
    /// Optional client-specific adaptation settings (e.g. Zed.dev).
    pub client_adapters: Option<ClientAdapterSettings>,
}
10
/// Network and logging configuration for the HTTP server.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerSettings {
    /// Interface to bind, e.g. "127.0.0.1" (the default).
    pub host: String,
    /// TCP port to listen on.
    pub port: u16,
    /// Log verbosity as a free-form string, e.g. "info"
    /// (presumably parsed by the logging setup elsewhere — TODO confirm).
    pub log_level: String,
}
17
/// Upstream LLM provider configuration.
///
/// Internally tagged for (de)serialization: the variant is selected by a
/// `"type"` field whose value is the variant name (e.g. `"OpenAI"`).
/// Every variant carries a `model` field; see [`LlmBackendSettings::get_model`].
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum LlmBackendSettings {
    /// OpenAI (or OpenAI-compatible endpoint when `base_url` is set).
    OpenAI {
        api_key: String,
        base_url: Option<String>,
        model: String,
    },
    /// Anthropic Claude API.
    Anthropic {
        api_key: String,
        model: String,
    },
    /// Local or remote Ollama instance; no API key required.
    Ollama {
        base_url: Option<String>,
        model: String,
    },
    /// Zhipu AI (GLM models).
    Zhipu {
        api_key: String,
        base_url: Option<String>,
        model: String,
    },
    /// Alibaba Cloud (Aliyun) model service.
    Aliyun {
        api_key: String,
        model: String,
    },
    /// ByteDance Volcengine model service.
    Volcengine {
        api_key: String,
        model: String,
    },
    /// Tencent Cloud model service.
    Tencent {
        api_key: String,
        model: String,
    },
    /// Longcat model service.
    Longcat {
        api_key: String,
        model: String,
    },
    /// Moonshot AI (Kimi models).
    Moonshot {
        api_key: String,
        model: String,
    },
}
60
61impl LlmBackendSettings {
62    /// 获取当前配置的模型名称
63    pub fn get_model(&self) -> String {
64        match self {
65            LlmBackendSettings::OpenAI { model, .. } => model.clone(),
66            LlmBackendSettings::Anthropic { model, .. } => model.clone(),
67            LlmBackendSettings::Ollama { model, .. } => model.clone(),
68            LlmBackendSettings::Zhipu { model, .. } => model.clone(),
69            LlmBackendSettings::Aliyun { model, .. } => model.clone(),
70            LlmBackendSettings::Volcengine { model, .. } => model.clone(),
71            LlmBackendSettings::Tencent { model, .. } => model.clone(),
72            LlmBackendSettings::Longcat { model, .. } => model.clone(),
73            LlmBackendSettings::Moonshot { model, .. } => model.clone(),
74        }
75    }
76}
77
/// Which API surfaces the server exposes; each is independently optional.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiSettings {
    /// Ollama-compatible API endpoint, if enabled.
    pub ollama: Option<OllamaApiSettings>,
    /// OpenAI-compatible API endpoint, if enabled.
    pub openai: Option<OpenAiApiSettings>,
    /// Anthropic-compatible API endpoint, if enabled.
    pub anthropic: Option<AnthropicApiSettings>,
}
84
/// Client-side adaptation configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientAdapterSettings {
    /// Default client adapter mode.
    pub default_adapter: Option<String>,
    /// Forced client adapter mode (ignores automatic detection).
    pub force_adapter: Option<String>,
    /// Zed.dev-specific configuration.
    pub zed: Option<ZedAdapterSettings>,
}
94
/// Configuration specific to the Zed.dev client adapter.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ZedAdapterSettings {
    /// Whether Zed.dev adaptation is enabled.
    pub enabled: bool,
    /// Whether to force-add an `images` field to responses.
    pub force_images_field: Option<bool>,
    /// Preferred response format.
    pub preferred_format: Option<String>,
}
104
105
106
/// Settings for the exposed Ollama-compatible API surface.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaApiSettings {
    /// Whether this API surface is served.
    pub enabled: bool,
    /// URL path prefix under which the API is mounted, e.g. "/ollama".
    pub path: String,
    /// HTTP header name used to carry the incoming API key, if any.
    pub api_key_header: Option<String>,
    /// Expected API key value; `None` presumably disables auth — TODO confirm.
    pub api_key: Option<String>,
}
114
/// Settings for the exposed OpenAI-compatible API surface.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAiApiSettings {
    /// Whether this API surface is served.
    pub enabled: bool,
    /// URL path prefix under which the API is mounted, e.g. "/v1".
    pub path: String,
    /// HTTP header name used to carry the incoming API key, if any.
    pub api_key_header: Option<String>,
    /// Expected API key value; `None` presumably disables auth — TODO confirm.
    pub api_key: Option<String>,
}
122
/// Settings for the exposed Anthropic-compatible API surface.
///
/// NOTE(review): unlike its Ollama/OpenAI siblings this struct has no
/// `api_key` field, only the header name — verify whether that asymmetry
/// is intentional before relying on key validation for this surface.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicApiSettings {
    /// Whether this API surface is served.
    pub enabled: bool,
    /// URL path prefix under which the API is mounted, e.g. "/anthropic".
    pub path: String,
    /// HTTP header name used to carry the incoming API key, if any.
    pub api_key_header: Option<String>,
}
129
130impl Default for Settings {
131    fn default() -> Self {
132        Self {
133            server: ServerSettings {
134                host: "127.0.0.1".to_string(),
135                port: 8080,
136                log_level: "info".to_string(),
137            },
138            llm_backend: LlmBackendSettings::Ollama {
139                base_url: Some("http://localhost:11434".to_string()),
140                model: "llama2".to_string(),
141            },
142            apis: ApiSettings {
143                ollama: Some(OllamaApiSettings {
144                    enabled: true,
145                    path: "/ollama".to_string(),
146                    api_key_header: None,
147                    api_key: None,
148                }),
149                openai: Some(OpenAiApiSettings {
150                    enabled: true,
151                    path: "/v1".to_string(),
152                    api_key_header: None,
153                    api_key: None,
154                }),
155                anthropic: Some(AnthropicApiSettings {
156                    enabled: true,
157                    path: "/anthropic".to_string(),
158                    api_key_header: None,
159                }),
160            },
161            client_adapters: None,
162        }
163    }
164}
165
impl Settings {
    // Settings are produced programmatically by AppConfigGenerator only;
    // there is intentionally no file-based configuration loader here.
}
170