1use crate::settings::{
2 Settings, ServerSettings, LlmBackendSettings, ApiSettings,
3 OpenAiApiSettings, OllamaApiSettings, AnthropicApiSettings,
4 ClientAdapterSettings, ZedAdapterSettings,
5};
6
/// Unit struct serving as a namespace for application-level helpers;
/// carries no state — only associated functions (e.g. `generate_config`).
pub struct CodexApp;
9
10impl CodexApp {
11 pub fn generate_config(cli_api_key: Option<&str>) -> Settings {
13 Settings {
14 server: ServerSettings {
15 host: "0.0.0.0".to_string(),
16 port: 8088,
17 log_level: "info".to_string(),
18 },
19 llm_backend: LlmBackendSettings::Zhipu {
20 api_key: String::new(),
22 base_url: Some("https://open.bigmodel.cn/api/paas/v4".to_string()),
23 model: "glm-4-flash".to_string(),
24 },
25 apis: ApiSettings {
26 openai: Some(OpenAiApiSettings {
27 enabled: true,
28 path: "/v1".to_string(),
29 api_key_header: Some("Authorization".to_string()),
30 api_key: cli_api_key.map(|k| k.to_string()),
32 }),
33 ollama: Some(OllamaApiSettings {
34 enabled: false,
35 path: "/ollama".to_string(),
36 api_key_header: None,
37 api_key: None,
38 }),
39 anthropic: Some(AnthropicApiSettings {
40 enabled: false,
41 path: "/anthropic".to_string(),
42 api_key_header: None,
43 }),
44 },
45 client_adapters: Some(ClientAdapterSettings {
46 default_adapter: Some("openai".to_string()),
47 force_adapter: Some("openai".to_string()),
48 zed: Some(ZedAdapterSettings {
49 enabled: false,
50 force_images_field: Some(false),
51 preferred_format: Some("json".to_string()),
52 }),
53 }),
54 }
55 }
56}
57