helios_engine/
config.rs

use std::fs;
use std::path::Path;

use serde::{Deserialize, Serialize};

use crate::error::{HeliosError, Result};

/// Top-level engine configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub llm: LLMConfig,
}

/// Settings for the LLM backend.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMConfig {
    pub model_name: String,
    pub base_url: String,
    pub api_key: String,
    /// Sampling temperature; defaults to 0.7 when omitted from the file.
    #[serde(default = "default_temperature")]
    pub temperature: f32,
    /// Maximum tokens to generate; defaults to 2048 when omitted from the file.
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
}

fn default_temperature() -> f32 {
    0.7
}

fn default_max_tokens() -> u32 {
    2048
}
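
// A minimal config file that deserializes into the structs above might look
// like the following (illustrative values; `temperature` and `max_tokens`
// may be omitted and will fall back to the serde defaults):
//
//     [llm]
//     model_name = "gpt-3.5-turbo"
//     base_url = "https://api.openai.com/v1"
//     api_key = "your-api-key-here"
//     temperature = 0.7
//     max_tokens = 2048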

impl Config {
    /// Load configuration from a TOML file.
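    ///
    /// # Example
    ///
    /// A sketch assuming the crate is named `helios_engine` and a config
    /// file exists at the given path (both illustrative):
    ///
    /// ```no_run
    /// use helios_engine::config::Config;
    ///
    /// let config = Config::from_file("helios.toml").expect("failed to load config");
    /// println!("model: {}", config.llm.model_name);
    /// ```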
    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
        let content = fs::read_to_string(path)
            .map_err(|e| HeliosError::ConfigError(format!("Failed to read config file: {}", e)))?;

        let config: Config = toml::from_str(&content)
            .map_err(|e| HeliosError::ConfigError(format!("Failed to parse config file: {}", e)))?;
        Ok(config)
    }

    /// Create a default configuration with placeholder credentials.
    pub fn new_default() -> Self {
        Self {
            llm: LLMConfig {
                model_name: "gpt-3.5-turbo".to_string(),
                base_url: "https://api.openai.com/v1".to_string(),
                api_key: "your-api-key-here".to_string(),
                temperature: default_temperature(),
                max_tokens: default_max_tokens(),
            },
        }
    }

    /// Save configuration to a TOML file.
    pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
        let content = toml::to_string_pretty(self)
            .map_err(|e| HeliosError::ConfigError(format!("Failed to serialize config: {}", e)))?;

        fs::write(path, content)
            .map_err(|e| HeliosError::ConfigError(format!("Failed to write config file: {}", e)))?;

        Ok(())
    }
}
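
// `new_default` doubles as a `Default` implementation; wiring it up lets the
// config work with `unwrap_or_default` and friends. A sketch, assuming no
// other `Default` impl exists elsewhere in the crate:
impl Default for Config {
    fn default() -> Self {
        Self::new_default()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip sketch: save the default config to a temp path, load it
    // back, and check that a few fields survive. Uses only std; the file
    // name is illustrative.
    #[test]
    fn save_and_load_round_trip() {
        let path = std::env::temp_dir().join("helios_config_test.toml");

        let config = Config::new_default();
        config.save(&path).expect("failed to save config");

        let loaded = Config::from_file(&path).expect("failed to load config");
        assert_eq!(loaded.llm.model_name, config.llm.model_name);
        assert_eq!(loaded.llm.max_tokens, config.llm.max_tokens);

        let _ = fs::remove_file(&path); // best-effort cleanup
    }
}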