use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Settings {
    pub server: ServerSettings,
    pub llm_backend: LlmBackendSettings,
    pub apis: ApiSettings,
    pub client_adapters: Option<ClientAdapterSettings>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerSettings {
    pub host: String,
    pub port: u16,
    pub log_level: String,
}
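
// A sketch of the matching config fragment, assuming a JSON config file (the
// on-disk format is not pinned down in this module); the values mirror the
// `Default` impl below:
//
//     "server": { "host": "127.0.0.1", "port": 8080, "log_level": "info" }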

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum LlmBackendSettings {
    OpenAI {
        api_key: String,
        base_url: Option<String>,
        model: String,
    },
    Anthropic {
        api_key: String,
        model: String,
    },
    Ollama {
        base_url: Option<String>,
        model: String,
    },
    Zhipu {
        api_key: String,
        base_url: Option<String>,
        model: String,
    },
    Aliyun {
        api_key: String,
        model: String,
    },
    Volcengine {
        api_key: String,
        model: String,
    },
    Tencent {
        api_key: String,
        model: String,
    },
    Longcat {
        api_key: String,
        model: String,
    },
    Moonshot {
        api_key: String,
        model: String,
    },
    Minimax {
        api_key: String,
        model: String,
    },
}
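
// `#[serde(tag = "type")]` makes this an internally tagged enum: the variant
// name is serialized as a "type" field alongside the variant's own fields.
// A sketch of the resulting fragment, again assuming JSON, using the default
// Ollama backend:
//
//     "llm_backend": {
//         "type": "Ollama",
//         "base_url": "http://localhost:11434",
//         "model": "llama2"
//     }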

impl LlmBackendSettings {
    /// Returns the configured model name for whichever backend variant is active.
    pub fn get_model(&self) -> String {
        match self {
            LlmBackendSettings::OpenAI { model, .. }
            | LlmBackendSettings::Anthropic { model, .. }
            | LlmBackendSettings::Ollama { model, .. }
            | LlmBackendSettings::Zhipu { model, .. }
            | LlmBackendSettings::Aliyun { model, .. }
            | LlmBackendSettings::Volcengine { model, .. }
            | LlmBackendSettings::Tencent { model, .. }
            | LlmBackendSettings::Longcat { model, .. }
            | LlmBackendSettings::Moonshot { model, .. }
            | LlmBackendSettings::Minimax { model, .. } => model.clone(),
        }
    }
}
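
// Usage sketch: with the `Default` impl below, the active backend is Ollama,
// so `Settings::default().llm_backend.get_model()` returns "llama2".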

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiSettings {
    pub ollama: Option<OllamaApiSettings>,
    pub openai: Option<OpenAiApiSettings>,
    pub anthropic: Option<AnthropicApiSettings>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientAdapterSettings {
    pub default_adapter: Option<String>,
    pub force_adapter: Option<String>,
    pub zed: Option<ZedAdapterSettings>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ZedAdapterSettings {
    pub enabled: bool,
    pub force_images_field: Option<bool>,
    pub preferred_format: Option<String>,
}
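
// A sketch of an opt-in adapter section, assuming JSON; the values are
// illustrative, not defaults (`client_adapters` defaults to `None` below).
// `Option` fields may simply be omitted; serde deserializes them to `None`:
//
//     "client_adapters": {
//         "force_adapter": "zed",
//         "zed": { "enabled": true, "force_images_field": true }
//     }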

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaApiSettings {
    pub enabled: bool,
    pub path: String,
    pub api_key_header: Option<String>,
    pub api_key: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAiApiSettings {
    pub enabled: bool,
    pub path: String,
    pub api_key_header: Option<String>,
    pub api_key: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicApiSettings {
    pub enabled: bool,
    pub path: String,
    pub api_key_header: Option<String>,
}
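
// `path` appears to be the mount point for each API surface; `api_key_header`
// (and `api_key`, where present) presumably gate inbound requests, though that
// handling lives outside this module. A sketch of an `apis` fragment, assuming
// JSON; the header name and key are placeholders:
//
//     "apis": {
//         "openai": {
//             "enabled": true,
//             "path": "/v1",
//             "api_key_header": "Authorization",
//             "api_key": "sk-example"
//         }
//     }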

impl Default for Settings {
    fn default() -> Self {
        Self {
            server: ServerSettings {
                host: "127.0.0.1".to_string(),
                port: 8080,
                log_level: "info".to_string(),
            },
            llm_backend: LlmBackendSettings::Ollama {
                base_url: Some("http://localhost:11434".to_string()),
                model: "llama2".to_string(),
            },
            apis: ApiSettings {
                ollama: Some(OllamaApiSettings {
                    enabled: true,
                    path: "/ollama".to_string(),
                    api_key_header: None,
                    api_key: None,
                }),
                openai: Some(OpenAiApiSettings {
                    enabled: true,
                    path: "/v1".to_string(),
                    api_key_header: None,
                    api_key: None,
                }),
                anthropic: Some(AnthropicApiSettings {
                    enabled: true,
                    path: "/anthropic".to_string(),
                    api_key_header: None,
                }),
            },
            client_adapters: None,
        }
    }
}

impl Settings {}
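
// A minimal round-trip check, assuming `serde_json` is available as a
// dev-dependency: it exercises the `Default` values and the internally
// tagged `llm_backend` representation.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn default_settings_round_trip() {
        let settings = Settings::default();
        assert_eq!(settings.server.port, 8080);
        assert_eq!(settings.llm_backend.get_model(), "llama2");

        // Serialize, then deserialize, and confirm the tagged enum survives.
        let json = serde_json::to_string(&settings).expect("serialize");
        assert!(json.contains("\"type\":\"Ollama\""));
        let back: Settings = serde_json::from_str(&json).expect("deserialize");
        assert_eq!(back.llm_backend.get_model(), "llama2");
    }
}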