1use serde::{Deserialize, Serialize};
2
/// Top-level application configuration, typically deserialized from a
/// config file via serde.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Settings {
    /// HTTP server bind address, port, and log level.
    pub server: ServerSettings,
    /// Which upstream LLM provider to use, with its credentials and model.
    pub llm_backend: LlmBackendSettings,
    /// Per-protocol API surface configuration (Ollama / OpenAI / Anthropic).
    pub apis: ApiSettings,
    /// Optional client-specific adapter settings; `None` means no adapter
    /// configuration is present.
    pub client_adapters: Option<ClientAdapterSettings>,
}
10
/// Network and logging settings for the local server.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerSettings {
    /// Bind address (e.g. "127.0.0.1").
    pub host: String,
    /// TCP port to listen on.
    pub port: u16,
    /// Log verbosity as a string (e.g. "info") — presumably parsed by the
    /// logging framework; TODO confirm accepted values against the logger setup.
    pub log_level: String,
}
17
/// Selects the upstream LLM provider and carries its connection parameters.
///
/// Internally tagged for serde: the serialized form carries a `"type"` field
/// whose value is the variant name exactly as written here (e.g. `"OpenAI"`,
/// `"Ollama"`), with the variant's fields inlined alongside it.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum LlmBackendSettings {
    /// OpenAI backend; `base_url` overrides the default endpoint when set.
    OpenAI {
        api_key: String,
        base_url: Option<String>,
        model: String,
    },
    /// Anthropic backend.
    Anthropic {
        api_key: String,
        model: String,
    },
    /// Ollama backend; no API key — `base_url` points at the Ollama server
    /// when set.
    Ollama {
        base_url: Option<String>,
        model: String,
    },
    /// Zhipu backend; `base_url` overrides the default endpoint when set.
    Zhipu {
        api_key: String,
        base_url: Option<String>,
        model: String,
    },
    /// Aliyun backend.
    Aliyun {
        api_key: String,
        model: String,
    },
    /// Volcengine backend.
    Volcengine {
        api_key: String,
        model: String,
    },
    /// Tencent backend.
    Tencent {
        api_key: String,
        model: String,
    },
}
52
53impl LlmBackendSettings {
54 pub fn get_model(&self) -> String {
56 match self {
57 LlmBackendSettings::OpenAI { model, .. } => model.clone(),
58 LlmBackendSettings::Anthropic { model, .. } => model.clone(),
59 LlmBackendSettings::Ollama { model, .. } => model.clone(),
60 LlmBackendSettings::Zhipu { model, .. } => model.clone(),
61 LlmBackendSettings::Aliyun { model, .. } => model.clone(),
62 LlmBackendSettings::Volcengine { model, .. } => model.clone(),
63 LlmBackendSettings::Tencent { model, .. } => model.clone(),
64 }
65 }
66}
67
/// Which inbound API surfaces are exposed, one optional section per
/// protocol; `None` means that protocol section is absent from the config.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiSettings {
    /// Ollama-compatible API endpoint settings.
    pub ollama: Option<OllamaApiSettings>,
    /// OpenAI-compatible API endpoint settings.
    pub openai: Option<OpenAiApiSettings>,
    /// Anthropic-compatible API endpoint settings.
    pub anthropic: Option<AnthropicApiSettings>,
}
74
/// Client-adapter selection and per-client tweaks.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientAdapterSettings {
    /// Adapter to use when none is otherwise determined — presumably matched
    /// by name at runtime; TODO confirm accepted values against the adapter
    /// registry.
    pub default_adapter: Option<String>,
    /// When set, overrides any other adapter selection.
    pub force_adapter: Option<String>,
    /// Settings specific to the Zed editor client.
    pub zed: Option<ZedAdapterSettings>,
}
84
/// Tweaks applied when the Zed adapter is active.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ZedAdapterSettings {
    /// Master switch for the Zed adapter.
    pub enabled: bool,
    /// NOTE(review): presumably forces an `images` field in responses for
    /// Zed compatibility — confirm against the adapter implementation.
    pub force_images_field: Option<bool>,
    /// Preferred response format as a free-form string; accepted values are
    /// not visible here — confirm against the adapter implementation.
    pub preferred_format: Option<String>,
}
94
95
96
/// Settings for the Ollama-compatible inbound API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaApiSettings {
    /// Whether this API surface is served at all.
    pub enabled: bool,
    /// URL path prefix the API is mounted under (e.g. "/ollama").
    pub path: String,
    /// Header name to read the inbound API key from; `None` presumably
    /// falls back to a default header — confirm against the auth middleware.
    pub api_key_header: Option<String>,
    /// Expected inbound API key; `None` presumably disables auth for this
    /// surface — confirm against the auth middleware.
    pub api_key: Option<String>,
}
104
/// Settings for the OpenAI-compatible inbound API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAiApiSettings {
    /// Whether this API surface is served at all.
    pub enabled: bool,
    /// URL path prefix the API is mounted under (e.g. "/v1").
    pub path: String,
    /// Header name to read the inbound API key from; `None` presumably
    /// falls back to a default header — confirm against the auth middleware.
    pub api_key_header: Option<String>,
    /// Expected inbound API key; `None` presumably disables auth for this
    /// surface — confirm against the auth middleware.
    pub api_key: Option<String>,
}
112
/// Settings for the Anthropic-compatible inbound API.
///
/// NOTE(review): unlike `OllamaApiSettings` and `OpenAiApiSettings` this
/// struct has no `api_key` field, only the header name — confirm whether the
/// omission is intentional or an oversight.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicApiSettings {
    /// Whether this API surface is served at all.
    pub enabled: bool,
    /// URL path prefix the API is mounted under (e.g. "/anthropic").
    pub path: String,
    /// Header name to read the inbound API key from; `None` presumably
    /// falls back to a default header — confirm against the auth middleware.
    pub api_key_header: Option<String>,
}
119
120impl Default for Settings {
121 fn default() -> Self {
122 Self {
123 server: ServerSettings {
124 host: "127.0.0.1".to_string(),
125 port: 8080,
126 log_level: "info".to_string(),
127 },
128 llm_backend: LlmBackendSettings::Ollama {
129 base_url: Some("http://localhost:11434".to_string()),
130 model: "llama2".to_string(),
131 },
132 apis: ApiSettings {
133 ollama: Some(OllamaApiSettings {
134 enabled: true,
135 path: "/ollama".to_string(),
136 api_key_header: None,
137 api_key: None,
138 }),
139 openai: Some(OpenAiApiSettings {
140 enabled: true,
141 path: "/v1".to_string(),
142 api_key_header: None,
143 api_key: None,
144 }),
145 anthropic: Some(AnthropicApiSettings {
146 enabled: true,
147 path: "/anthropic".to_string(),
148 api_key_header: None,
149 }),
150 },
151 client_adapters: None,
152 }
153 }
154}
155
// NOTE(review): empty impl block — presumably a placeholder for load/validate
// helpers to come; remove it if no inherent methods are planned.
impl Settings {
    }
160