// mofa_foundation/config.rs
//! Agent configuration file parsing.
//!
//! Supports multiple configuration formats: YAML, TOML, JSON, INI, RON, JSON5.
//!
//! # Example configuration (agent.yml, agent.toml, agent.json, etc.)
//!
//! ```yaml
//! agent:
//!   id: "my-agent-001"
//!   name: "My LLM Agent"
//!
//! llm:
//!   provider: openai          # openai, ollama, azure
//!   model: gpt-4o
//!   api_key: ${OPENAI_API_KEY}  # environment variables are supported
//!   base_url: null            # optional, for a custom endpoint
//!   temperature: 0.7
//!   max_tokens: 4096
//!   system_prompt: "You are a helpful assistant."
//!
//! tools:
//!   - name: web_search
//!     enabled: true
//!   - name: calculator
//!     enabled: true
//!
//! runtime:
//!   max_concurrent_tasks: 10
//!   default_timeout_secs: 30
//! ```

use std::collections::HashMap;
use std::path::Path;

use mofa_kernel::config::{from_str, load_config};
use serde::{Deserialize, Serialize};
36
/// Root structure of an agent configuration file.
///
/// Mirrors the top-level keys of the config document (`agent`, `llm`,
/// `tools`, `runtime`, `inputs`, `outputs`); every section except
/// `agent` is optional and deserializes to `None` when absent.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentYamlConfig {
    /// Basic agent metadata (required `agent:` section).
    pub agent: AgentInfo,
    /// LLM provider settings (optional `llm:` section).
    #[serde(default)]
    pub llm: Option<LLMYamlConfig>,
    /// Tool list (optional `tools:` section).
    #[serde(default)]
    pub tools: Option<Vec<ToolConfig>>,
    /// Runtime tuning (optional `runtime:` section).
    #[serde(default)]
    pub runtime: Option<RuntimeConfig>,
    /// Input port names (optional `inputs:` section).
    #[serde(default)]
    pub inputs: Option<Vec<String>>,
    /// Output port names (optional `outputs:` section).
    #[serde(default)]
    pub outputs: Option<Vec<String>>,
}
58
/// Basic agent metadata from the `agent:` section.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentInfo {
    /// Unique agent identifier.
    pub id: String,
    /// Human-readable agent name.
    pub name: String,
    /// Optional free-form description.
    #[serde(default)]
    pub description: Option<String>,
    /// Declared capability tags; empty when the key is absent.
    #[serde(default)]
    pub capabilities: Vec<String>,
}
73
/// LLM provider settings from the `llm:` section.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LLMYamlConfig {
    /// Provider kind: openai, ollama, azure, compatible, anthropic, gemini.
    /// Falls back to "openai" when the key is absent.
    #[serde(default = "default_provider")]
    pub provider: String,
    /// Model name (e.g. "gpt-4o").
    #[serde(default)]
    pub model: Option<String>,
    /// API key; the module docs advertise `${ENV_VAR}` syntax, but the
    /// expansion is not performed in this file — presumably done by the
    /// loader (TODO confirm).
    #[serde(default)]
    pub api_key: Option<String>,
    /// Custom API base URL (optional).
    #[serde(default)]
    pub base_url: Option<String>,
    /// Azure deployment name (relevant only for the azure provider).
    #[serde(default)]
    pub deployment: Option<String>,
    /// Sampling temperature.
    #[serde(default)]
    pub temperature: Option<f32>,
    /// Maximum number of tokens to generate.
    #[serde(default)]
    pub max_tokens: Option<u32>,
    /// System prompt.
    #[serde(default)]
    pub system_prompt: Option<String>,
}
102
/// Serde default for [`LLMYamlConfig::provider`]: the "openai" backend.
fn default_provider() -> String {
    String::from("openai")
}
106
107impl Default for LLMYamlConfig {
108    fn default() -> Self {
109        Self {
110            provider: "openai".to_string(),
111            model: None,
112            api_key: None,
113            base_url: None,
114            deployment: None,
115            temperature: Some(0.7),
116            max_tokens: Some(4096),
117            system_prompt: None,
118        }
119    }
120}
121
/// Per-tool configuration entry from the `tools:` section.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolConfig {
    /// Tool name.
    pub name: String,
    /// Whether the tool is enabled; defaults to `true` when omitted.
    #[serde(default = "default_true")]
    pub enabled: bool,
    /// Tool-specific options as free-form JSON values; empty map when omitted.
    #[serde(default)]
    pub config: HashMap<String, serde_json::Value>,
}
134
/// Serde default for [`ToolConfig::enabled`]: tools are on unless
/// explicitly disabled.
fn default_true() -> bool {
    true
}
138
/// Runtime tuning from the `runtime:` section.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RuntimeConfig {
    /// Maximum number of concurrently running tasks (default: 10).
    #[serde(default = "default_max_concurrent")]
    pub max_concurrent_tasks: usize,
    /// Default per-task timeout in seconds (default: 30).
    #[serde(default = "default_timeout")]
    pub default_timeout_secs: u64,
}
149
/// Serde default for [`RuntimeConfig::max_concurrent_tasks`].
fn default_max_concurrent() -> usize {
    10
}
153
/// Serde default for [`RuntimeConfig::default_timeout_secs`], in seconds.
fn default_timeout() -> u64 {
    30
}
157
158impl Default for RuntimeConfig {
159    fn default() -> Self {
160        Self {
161            max_concurrent_tasks: 10,
162            default_timeout_secs: 30,
163        }
164    }
165}
166
167impl AgentYamlConfig {
168    /// 从文件加载配置 (自动检测格式)
169    pub fn from_file(path: impl AsRef<Path>) -> anyhow::Result<Self> {
170        let path_str = path.as_ref().to_string_lossy().to_string();
171        load_config(&path_str).map_err(|e| anyhow::anyhow!("Failed to load config: {}", e))
172    }
173
174    /// 从字符串解析配置 (指定格式)
175    pub fn from_str_with_format(content: &str, format: &str) -> anyhow::Result<Self> {
176        use config::FileFormat;
177
178        let file_format = match format.to_lowercase().as_str() {
179            "yaml" | "yml" => FileFormat::Yaml,
180            "toml" => FileFormat::Toml,
181            "json" => FileFormat::Json,
182            "ini" => FileFormat::Ini,
183            "ron" => FileFormat::Ron,
184            "json5" => FileFormat::Json5,
185            _ => return Err(anyhow::anyhow!("Unsupported config format: {}", format)),
186        };
187
188        from_str(content, file_format).map_err(|e| anyhow::anyhow!("Failed to parse config: {}", e))
189    }
190
191    /// 从字符串解析配置 (自动检测为 YAML)
192    pub fn from_str(content: &str) -> anyhow::Result<Self> {
193        Self::from_str_with_format(content, "yaml")
194    }
195}