1use crate::settings::{
2 Settings, ServerSettings, LlmBackendSettings, ApiSettings,
3 OpenAiApiSettings, OllamaApiSettings, AnthropicApiSettings,
4 ClientAdapterSettings, ZedAdapterSettings,
5};
6use super::AppConfigGenerator;
7
/// Marker type for the Codex client application.
///
/// Stateless: it only namespaces the `generate_config` associated function
/// that produces this app's default `Settings`.
pub struct CodexApp;
11impl CodexApp {
12 pub fn generate_config(cli_api_key: Option<&str>) -> Settings {
14 Settings {
15 server: ServerSettings {
16 host: "0.0.0.0".to_string(),
17 port: 8088,
18 log_level: "info".to_string(),
19 },
20 llm_backend: LlmBackendSettings::Zhipu {
21 api_key: AppConfigGenerator::resolve_env_var("${ZHIPU_API_KEY}", cli_api_key),
22 base_url: Some("https://open.bigmodel.cn/api/paas/v4".to_string()),
23 model: "glm-4-flash".to_string(),
24 },
25 apis: ApiSettings {
26 openai: Some(OpenAiApiSettings {
27 enabled: true,
28 path: "/v1".to_string(),
29 api_key_header: Some("Authorization".to_string()),
30 api_key: Some(AppConfigGenerator::resolve_env_var("${LLM_LINK_API_KEY}", cli_api_key)),
31 }),
32 ollama: Some(OllamaApiSettings {
33 enabled: false,
34 path: "/ollama".to_string(),
35 api_key_header: None,
36 api_key: None,
37 }),
38 anthropic: Some(AnthropicApiSettings {
39 enabled: false,
40 path: "/anthropic".to_string(),
41 api_key_header: None,
42 }),
43 },
44 client_adapters: Some(ClientAdapterSettings {
45 default_adapter: Some("openai".to_string()),
46 force_adapter: Some("openai".to_string()),
47 zed: Some(ZedAdapterSettings {
48 enabled: false,
49 force_images_field: Some(false),
50 preferred_format: Some("json".to_string()),
51 }),
52 }),
53 }
54 }
55}
56