admin_config/llm_config.rs

use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmConfig {
    /// LLM provider identifier, e.g. "openai".
    pub provider: String,
    /// API key used to authenticate with the provider.
    pub api_key: String,
    /// Optional override for the provider's API base URL.
    pub api_base: Option<String>,
    /// Model name to request, e.g. "gpt-3.5-turbo".
    pub model: String,
    /// Maximum number of tokens to generate per response.
    pub max_tokens: u32,
    /// Sampling temperature; higher values yield more varied output.
    pub temperature: f32,
}

impl Default for LlmConfig {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            api_key: "".to_string(),
            api_base: None,
            model: "gpt-3.5-turbo".to_string(),
            max_tokens: 2048,
            temperature: 0.7,
        }
    }
}
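Because the struct derives Serialize and Deserialize, it can be round-tripped through any serde-compatible format. Below is a minimal sketch of loading the config from a JSON file and falling back to the defaults when the file is missing or malformed; the serde_json crate, the load_llm_config helper, and the "llm_config.json" path are assumptions for illustration, not part of the original module, and it assumes the LlmConfig type above is in scope.

use std::fs;

// Hypothetical helper: read the config file, or fall back to LlmConfig::default().
fn load_llm_config(path: &str) -> LlmConfig {
    fs::read_to_string(path)
        .ok()
        .and_then(|contents| serde_json::from_str(&contents).ok())
        .unwrap_or_default()
}

fn main() {
    let config = load_llm_config("llm_config.json");
    println!("provider: {}, model: {}", config.provider, config.model);

    // Write the (possibly default) config back out so a template exists on first run.
    let json = serde_json::to_string_pretty(&config).expect("serialize config");
    fs::write("llm_config.json", json).expect("write config file");
}

Falling back to Default rather than erroring keeps first-run behavior simple, though a real deployment would likely want to surface a missing api_key explicitly instead of silently using the empty default.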