openfunctions_rs/core/config.rs
//! Configuration management for OpenFunctions
//!
//! This module defines the structures for managing project configuration,
//! including settings for the project, runtime, AI services, and environment.

use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};

/// Main configuration structure for the OpenFunctions project.
///
/// This struct holds all configuration settings, loaded from an `openfunctions.toml`
/// file. It is organized into sections for better management of settings.
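///
/// # Example
///
/// A minimal `openfunctions.toml` sketch with illustrative values. The keys mirror
/// the field names on this struct; since the derives do not use `#[serde(default)]`,
/// every non-`Option` field must be present in the file.
///
/// ```toml
/// tool_dirs = ["tools"]
/// agent_dirs = ["agents"]
///
/// [project]
/// name = "my-project"
/// version = "0.1.0"
/// cache_dir = ".openfunctions/cache"
/// output_dir = "bin"
///
/// [runtime]
/// max_parallel_executions = 4
/// execution_timeout = 300
/// allowed_commands = []
/// env_passthrough = ["PATH", "HOME"]
///
/// [env]
/// RUST_LOG = "info"
/// ```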
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Project-specific settings, like name, version, and important directories.
    pub project: ProjectConfig,

    /// Settings related to the execution runtime for tools and agents.
    pub runtime: RuntimeConfig,

    /// Configuration for AI services, such as OpenAI.
    /// This is only available when the `openai` feature is enabled.
    #[cfg(feature = "openai")]
    pub ai: Option<AiConfig>,

    /// A list of directories where tool definitions are stored.
    pub tool_dirs: Vec<PathBuf>,

    /// A list of directories where agent definitions are stored.
    pub agent_dirs: Vec<PathBuf>,

    /// Environment variables to be made available to tools and agents during execution.
    pub env: HashMap<String, String>,
}

impl Config {
    /// Loads the configuration for the project.
    ///
    /// The configuration is loaded from a file. The following paths are checked in order:
    /// 1. A path explicitly provided as an argument.
    /// 2. `openfunctions.toml` in the current directory.
    /// 3. `.openfunctions/config.toml` in the current directory.
    ///
    /// If no configuration file is found in these locations, a default configuration is
    /// created and returned.
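    ///
    /// # Example
    ///
    /// A minimal usage sketch; the import path assumes this module is exposed as
    /// `core::config` in the `openfunctions_rs` crate.
    ///
    /// ```no_run
    /// # use openfunctions_rs::core::config::Config;
    /// # async fn run() -> anyhow::Result<()> {
    /// // `None` falls back to the default search paths listed above.
    /// let config = Config::load(None).await?;
    /// println!("project: {}", config.project.name);
    /// # Ok(())
    /// # }
    /// ```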
    pub async fn load(path: Option<PathBuf>) -> Result<Self> {
        if let Some(path) = path {
            Self::from_file(&path).await
        } else if Path::new("openfunctions.toml").exists() {
            Self::from_file("openfunctions.toml").await
        } else if Path::new(".openfunctions/config.toml").exists() {
            Self::from_file(".openfunctions/config.toml").await
        } else {
            Ok(Self::default())
        }
    }

    /// Loads the configuration from the specified file path.
    ///
    /// This function reads the file content, parses it as TOML, and validates
    /// the resulting configuration.
    async fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
        let content = tokio::fs::read_to_string(&path)
            .await
            .with_context(|| format!("failed to read config file: {}", path.as_ref().display()))?;
        let config: Self = toml::from_str(&content)
            .with_context(|| format!("failed to parse config file: {}", path.as_ref().display()))?;
        config.validate()?;
        Ok(config)
    }

    /// Validates the loaded configuration to ensure that essential paths exist.
    ///
    /// This method checks for the existence of the specified tool and agent directories.
    fn validate(&self) -> Result<()> {
        for dir in &self.tool_dirs {
            if !dir.exists() {
                anyhow::bail!("Tool directory does not exist: {}", dir.display());
            }
        }

        for dir in &self.agent_dirs {
            if !dir.exists() {
                anyhow::bail!("Agent directory does not exist: {}", dir.display());
            }
        }

        Ok(())
    }
}

impl Default for Config {
    /// Provides a default configuration for the project.
    fn default() -> Self {
        Self {
            project: ProjectConfig::default(),
            runtime: RuntimeConfig::default(),
            #[cfg(feature = "openai")]
            ai: None,
            tool_dirs: vec![PathBuf::from("tools")],
            agent_dirs: vec![PathBuf::from("agents")],
            env: HashMap::new(),
        }
    }
}

/// Project-specific configuration.
///
/// Contains metadata about the project and paths for build artifacts and caching.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectConfig {
    /// The name of the project.
    pub name: String,
    /// The version of the project.
    pub version: String,
    /// An optional description of the project.
    pub description: Option<String>,
    /// The directory for storing cached files.
    pub cache_dir: PathBuf,
    /// The directory where build outputs are stored.
    pub output_dir: PathBuf,
}

impl Default for ProjectConfig {
    /// Provides a default project configuration.
    fn default() -> Self {
        Self {
            name: "openfunctions-project".to_string(),
            version: "0.1.0".to_string(),
            description: None,
            cache_dir: PathBuf::from(".openfunctions/cache"),
            output_dir: PathBuf::from("bin"),
        }
    }
}

/// Runtime configuration for executing tools and agents.
///
/// These settings control the environment in which functions are executed, including
/// resource limits and permissions.
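///
/// # Example
///
/// A sketch of overriding one field while keeping the rest of the defaults, via
/// struct update syntax (the import path assumes this module is exposed as
/// `core::config` in the `openfunctions_rs` crate):
///
/// ```
/// # use openfunctions_rs::core::config::RuntimeConfig;
/// let runtime = RuntimeConfig {
///     execution_timeout: 60, // one minute instead of the default 300 seconds
///     ..RuntimeConfig::default()
/// };
/// assert_eq!(runtime.max_parallel_executions, 4);
/// ```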
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RuntimeConfig {
    /// The maximum number of parallel executions allowed.
    pub max_parallel_executions: usize,
    /// The timeout for a single execution, in seconds.
    pub execution_timeout: u64,
    /// The memory limit for an execution, in bytes. `None` means no limit.
    pub memory_limit: Option<u64>,
    /// A list of commands that tools are allowed to execute.
    pub allowed_commands: Vec<String>,
    /// A list of environment variables to pass through to the execution environment.
    pub env_passthrough: Vec<String>,
}

impl Default for RuntimeConfig {
    /// Provides a default runtime configuration.
    fn default() -> Self {
        Self {
            max_parallel_executions: 4,
            execution_timeout: 300,
            memory_limit: None,
            allowed_commands: vec![],
            env_passthrough: vec!["PATH".to_string(), "HOME".to_string()],
        }
    }
}

/// AI service configuration, specifically for OpenAI models.
///
/// This is only available when the `openai` feature is enabled.
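///
/// Configured through an `[ai]` table in `openfunctions.toml`. A sketch using the
/// default values (`api_key` is optional and may be omitted here if it is supplied
/// another way, e.g. via the environment):
///
/// ```toml
/// [ai]
/// model = "gpt-4o-mini"
/// max_tokens = 2000
/// temperature = 0.7
/// embedding_model = "text-embedding-3-small"
/// ```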
#[cfg(feature = "openai")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AiConfig {
    /// The API key for the OpenAI service. Can be omitted if set in the environment.
    pub api_key: Option<String>,
    /// The model to use for AI completions, e.g., `gpt-4o-mini`.
    pub model: String,
    /// The maximum number of tokens to generate in a completion.
    pub max_tokens: u32,
    /// The sampling temperature to use for generation.
    pub temperature: f32,
    /// The model to use for creating embeddings.
    pub embedding_model: String,
}

#[cfg(feature = "openai")]
impl Default for AiConfig {
    /// Provides a default AI configuration for OpenAI services.
    fn default() -> Self {
        Self {
            api_key: None,
            model: "gpt-4o-mini".to_string(),
            max_tokens: 2000,
            temperature: 0.7,
            embedding_model: "text-embedding-3-small".to_string(),
        }
    }
}