admin_config/
llm_config.rs

use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmConfig {
    /// LLM provider (openai/azure/anthropic)
    pub provider: String,
    /// API key
    pub api_key: String,
    /// API base URL
    pub api_base: Option<String>,
    /// Model name
    pub model: String,
    /// Maximum number of tokens
    pub max_tokens: u32,
    /// Sampling temperature
    pub temperature: f32,
}

impl Default for LlmConfig {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            api_key: "".to_string(),
            api_base: None,
            model: "gpt-3.5-turbo".to_string(),
            max_tokens: 2048,
            temperature: 0.7,
        }
    }
}