1use serde::{Deserialize, Serialize};
2
/// Top-level application configuration, deserialized via serde
/// (presumably from a TOML/JSON config file — loading code not shown here).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Settings {
    /// HTTP server bind address, port, and log verbosity.
    pub server: ServerSettings,
    /// Which upstream LLM provider requests are forwarded to.
    pub llm_backend: LlmBackendSettings,
    /// Per-protocol inbound API surfaces (Ollama / OpenAI / Anthropic).
    pub apis: ApiSettings,
    /// Optional client-specific adapter tweaks; `None` means no adapter config.
    pub client_adapters: Option<ClientAdapterSettings>,
}
10
/// Network and logging settings for the HTTP server.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerSettings {
    /// Interface to bind, e.g. "127.0.0.1" (the default).
    pub host: String,
    /// TCP port to listen on.
    pub port: u16,
    /// Log level name, e.g. "info" — presumably fed to the logger's
    /// filter; exact accepted values depend on the logging crate (not shown).
    pub log_level: String,
}
17
/// Upstream LLM provider configuration.
///
/// Internally tagged for serde: the config carries a `type` field whose
/// value selects the variant (e.g. `type = "Ollama"`), with the variant's
/// fields alongside it.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum LlmBackendSettings {
    OpenAI {
        api_key: String,
        /// Override for the API endpoint; `None` presumably means the
        /// provider's default URL — confirm against the client code.
        base_url: Option<String>,
        model: String,
    },
    Anthropic {
        api_key: String,
        model: String,
    },
    /// Local Ollama instance; no API key required.
    Ollama {
        base_url: Option<String>,
        model: String,
    },
    Zhipu {
        api_key: String,
        base_url: Option<String>,
        model: String,
    },
    Aliyun {
        api_key: String,
        model: String,
    },
    Volcengine {
        api_key: String,
        model: String,
    },
    Tencent {
        api_key: String,
        model: String,
    },
    Longcat {
        api_key: String,
        model: String,
    },
}
56
57impl LlmBackendSettings {
58 pub fn get_model(&self) -> String {
60 match self {
61 LlmBackendSettings::OpenAI { model, .. } => model.clone(),
62 LlmBackendSettings::Anthropic { model, .. } => model.clone(),
63 LlmBackendSettings::Ollama { model, .. } => model.clone(),
64 LlmBackendSettings::Zhipu { model, .. } => model.clone(),
65 LlmBackendSettings::Aliyun { model, .. } => model.clone(),
66 LlmBackendSettings::Volcengine { model, .. } => model.clone(),
67 LlmBackendSettings::Tencent { model, .. } => model.clone(),
68 LlmBackendSettings::Longcat { model, .. } => model.clone(),
69 }
70 }
71}
72
/// Which inbound API protocols the server exposes.
/// Each is optional: `None` presumably means that surface is not configured
/// at all, while `Some` with `enabled: false` disables a configured one —
/// TODO confirm against the routing code.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiSettings {
    pub ollama: Option<OllamaApiSettings>,
    pub openai: Option<OpenAiApiSettings>,
    pub anthropic: Option<AnthropicApiSettings>,
}
79
/// Client-specific request/response adapter configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientAdapterSettings {
    /// Adapter name used when no client is detected — presumably matched
    /// by name against registered adapters; verify against adapter registry.
    pub default_adapter: Option<String>,
    /// If set, this adapter is applied regardless of client detection
    /// — assumption based on the field name; TODO confirm.
    pub force_adapter: Option<String>,
    /// Settings specific to the Zed editor client.
    pub zed: Option<ZedAdapterSettings>,
}
89
/// Tweaks applied when adapting requests for the Zed editor.
/// Field semantics inferred from names — confirm against the Zed adapter
/// implementation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ZedAdapterSettings {
    pub enabled: bool,
    /// Presumably forces an `images` field into responses/requests
    /// even when absent; `None` = adapter default.
    pub force_images_field: Option<bool>,
    /// Preferred wire format name; accepted values not visible here.
    pub preferred_format: Option<String>,
}
99
100
101
/// Inbound Ollama-compatible API surface.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaApiSettings {
    /// Whether this surface is served at all.
    pub enabled: bool,
    /// URL path prefix the surface is mounted under (default "/ollama").
    pub path: String,
    /// Header name to read the client API key from; `None` presumably
    /// falls back to a conventional header — confirm in auth middleware.
    pub api_key_header: Option<String>,
    /// Expected API key; `None` presumably disables auth for this surface
    /// — TODO confirm.
    pub api_key: Option<String>,
}
109
/// Inbound OpenAI-compatible API surface.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAiApiSettings {
    /// Whether this surface is served at all.
    pub enabled: bool,
    /// URL path prefix the surface is mounted under (default "/v1").
    pub path: String,
    /// Header name to read the client API key from; `None` presumably
    /// falls back to a conventional header — confirm in auth middleware.
    pub api_key_header: Option<String>,
    /// Expected API key; `None` presumably disables auth for this surface
    /// — TODO confirm.
    pub api_key: Option<String>,
}
117
/// Inbound Anthropic-compatible API surface.
///
/// NOTE(review): unlike the Ollama/OpenAI siblings there is no `api_key`
/// field here, only the header name — verify whether that is intentional
/// or an omission before relying on inbound auth for this surface.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicApiSettings {
    /// Whether this surface is served at all.
    pub enabled: bool,
    /// URL path prefix the surface is mounted under (default "/anthropic").
    pub path: String,
    /// Header name to read the client API key from.
    pub api_key_header: Option<String>,
}
124
125impl Default for Settings {
126 fn default() -> Self {
127 Self {
128 server: ServerSettings {
129 host: "127.0.0.1".to_string(),
130 port: 8080,
131 log_level: "info".to_string(),
132 },
133 llm_backend: LlmBackendSettings::Ollama {
134 base_url: Some("http://localhost:11434".to_string()),
135 model: "llama2".to_string(),
136 },
137 apis: ApiSettings {
138 ollama: Some(OllamaApiSettings {
139 enabled: true,
140 path: "/ollama".to_string(),
141 api_key_header: None,
142 api_key: None,
143 }),
144 openai: Some(OpenAiApiSettings {
145 enabled: true,
146 path: "/v1".to_string(),
147 api_key_header: None,
148 api_key: None,
149 }),
150 anthropic: Some(AnthropicApiSettings {
151 enabled: true,
152 path: "/anthropic".to_string(),
153 api_key_header: None,
154 }),
155 },
156 client_adapters: None,
157 }
158 }
159}
160
// (removed an empty `impl Settings {}` block — dead code with no items;
// reintroduce an impl block when Settings gains inherent methods.)
165