1use serde::{Deserialize, Serialize};
2
/// Top-level application configuration.
///
/// Field order is part of the serialized layout for order-sensitive formats,
/// so fields must not be reordered without considering serde output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Settings {
    /// Bind address, port, and log level for the local server.
    pub server: ServerSettings,
    /// Which upstream LLM provider to use, with its credentials/model.
    pub llm_backend: LlmBackendSettings,
    /// Per-protocol API surface configuration (Ollama / OpenAI / Anthropic).
    pub apis: ApiSettings,
    /// Optional client-adapter tuning; `None` (the default) means no
    /// adapter-specific configuration. Missing in input deserializes to `None`.
    pub client_adapters: Option<ClientAdapterSettings>,
}
10
/// Local server settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerSettings {
    /// Host/interface to bind to (default "127.0.0.1" — see `Settings::default`).
    pub host: String,
    /// TCP port to listen on (default 8080).
    pub port: u16,
    /// Log verbosity as a string (default "info"); exact accepted values are
    /// interpreted by the logging setup elsewhere — not validated here.
    pub log_level: String,
}
17
/// Backend LLM provider selection.
///
/// Internally tagged for serde: the config carries a `type` field whose value
/// is the variant name (e.g. `type = "OpenAI"`), with the variant's fields
/// inline beside it. Renaming a variant is therefore a config-format break.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum LlmBackendSettings {
    /// OpenAI-compatible backend; `base_url` overrides the default endpoint
    /// when set (`None` deserializes from a missing field).
    OpenAI {
        api_key: String,
        base_url: Option<String>,
        model: String,
    },
    /// Anthropic backend.
    Anthropic {
        api_key: String,
        model: String,
    },
    /// Ollama backend; no API key, optional custom `base_url`
    /// (defaults to http://localhost:11434 in `Settings::default`).
    Ollama {
        base_url: Option<String>,
        model: String,
    },
    /// Zhipu backend with optional endpoint override.
    Zhipu {
        api_key: String,
        base_url: Option<String>,
        model: String,
    },
    /// Aliyun backend.
    Aliyun {
        api_key: String,
        model: String,
    },
    /// Volcengine backend.
    Volcengine {
        api_key: String,
        model: String,
    },
    /// Tencent backend.
    Tencent {
        api_key: String,
        model: String,
    },
}
52
/// Which inbound API surfaces are configured.
///
/// Each field is optional: a missing section deserializes to `None`,
/// which is distinct from a present-but-disabled section (`enabled: false`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiSettings {
    /// Ollama-protocol endpoint configuration.
    pub ollama: Option<OllamaApiSettings>,
    /// OpenAI-protocol endpoint configuration.
    pub openai: Option<OpenAiApiSettings>,
    /// Anthropic-protocol endpoint configuration.
    pub anthropic: Option<AnthropicApiSettings>,
}
59
/// Client-adapter selection settings.
///
/// All fields optional; missing fields deserialize to `None`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientAdapterSettings {
    /// Adapter to use when none is otherwise selected.
    // NOTE(review): precedence between `default_adapter` and `force_adapter`
    // is decided by consuming code, not visible here — confirm at the call site.
    pub default_adapter: Option<String>,
    /// Adapter to use unconditionally, presumably overriding detection — verify against consumer.
    pub force_adapter: Option<String>,
    /// Settings specific to the Zed editor adapter.
    pub zed: Option<ZedAdapterSettings>,
}
69
/// Zed-editor adapter tuning.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ZedAdapterSettings {
    /// Whether the Zed adapter is active. Required field (no serde default).
    pub enabled: bool,
    /// When `Some(true)`, presumably forces an `images` field in responses —
    /// semantics live in the adapter implementation; confirm there.
    pub force_images_field: Option<bool>,
    /// Preferred response format name; accepted values are defined by the
    /// adapter implementation, not validated here.
    pub preferred_format: Option<String>,
}
79
80
81
/// Ollama-protocol endpoint settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaApiSettings {
    /// Whether this endpoint is served.
    pub enabled: bool,
    /// URL path prefix for the endpoint (default "/ollama").
    pub path: String,
    /// Header name to read the client API key from; `None` presumably means
    /// no key check — confirm in the request-handling code.
    pub api_key_header: Option<String>,
    /// Expected API key value for inbound requests.
    pub api_key: Option<String>,
}
89
/// OpenAI-protocol endpoint settings. Mirrors `OllamaApiSettings`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAiApiSettings {
    /// Whether this endpoint is served.
    pub enabled: bool,
    /// URL path prefix for the endpoint (default "/v1").
    pub path: String,
    /// Header name to read the client API key from.
    pub api_key_header: Option<String>,
    /// Expected API key value for inbound requests.
    pub api_key: Option<String>,
}
97
/// Anthropic-protocol endpoint settings.
// NOTE(review): unlike the Ollama/OpenAI siblings this struct has no
// `api_key` field, only `api_key_header`. If that asymmetry is unintentional,
// adding `pub api_key: Option<String>` would be deserialization-compatible
// (missing Option fields read as None) but changes serialized output — confirm
// intent before changing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicApiSettings {
    /// Whether this endpoint is served.
    pub enabled: bool,
    /// URL path prefix for the endpoint (default "/anthropic").
    pub path: String,
    /// Header name to read the client API key from.
    pub api_key_header: Option<String>,
}
104
105impl Default for Settings {
106 fn default() -> Self {
107 Self {
108 server: ServerSettings {
109 host: "127.0.0.1".to_string(),
110 port: 8080,
111 log_level: "info".to_string(),
112 },
113 llm_backend: LlmBackendSettings::Ollama {
114 base_url: Some("http://localhost:11434".to_string()),
115 model: "llama2".to_string(),
116 },
117 apis: ApiSettings {
118 ollama: Some(OllamaApiSettings {
119 enabled: true,
120 path: "/ollama".to_string(),
121 api_key_header: None,
122 api_key: None,
123 }),
124 openai: Some(OpenAiApiSettings {
125 enabled: true,
126 path: "/v1".to_string(),
127 api_key_header: None,
128 api_key: None,
129 }),
130 anthropic: Some(AnthropicApiSettings {
131 enabled: true,
132 path: "/anthropic".to_string(),
133 api_key_header: None,
134 }),
135 },
136 client_adapters: None,
137 }
138 }
139}
140
145