// openfunctions-rs 0.1.0
//
// A universal framework for creating and managing LLM tools and agents
//! Configuration management for OpenFunctions
//!
//! This module defines the structures for managing project configuration,
//! including settings for the project, runtime, AI services, and environment.

use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};

/// Main configuration structure for the OpenFunctions project.
///
/// This struct holds all configuration settings, deserialized from TOML
/// (typically an `openfunctions.toml` file — see [`Config::load`]). It is
/// organized into sections for better management of settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Project-specific settings, like name, version, and important directories.
    pub project: ProjectConfig,

    /// Settings related to the execution runtime for tools and agents.
    pub runtime: RuntimeConfig,

    /// Configuration for AI services, such as OpenAI.
    /// This is only available when the "openai" feature is enabled.
    /// `None` when the loaded file has no `[ai]` section.
    #[cfg(feature = "openai")]
    pub ai: Option<AiConfig>,

    /// A list of directories where tool definitions are stored.
    /// Each directory must exist on disk (checked by validation at load time).
    pub tool_dirs: Vec<PathBuf>,

    /// A list of directories where agent definitions are stored.
    /// Each directory must exist on disk (checked by validation at load time).
    pub agent_dirs: Vec<PathBuf>,

    /// Environment variables to be made available to tools and agents during execution.
    pub env: HashMap<String, String>,
}

impl Config {
    /// Loads the configuration for the project.
    ///
    /// The configuration is loaded from a file. The following paths are checked in order:
    /// 1. A path explicitly provided as an argument.
    /// 2. `openfunctions.toml` in the current directory.
    /// 3. `.openfunctions/config.toml` in the current directory.
    ///
    /// If no configuration file is found in these locations, a default configuration is
    /// created and returned.
    ///
    /// # Errors
    ///
    /// Returns an error if the chosen file cannot be read, is not valid TOML,
    /// or fails validation (a configured tool/agent directory is missing).
    pub async fn load(path: Option<PathBuf>) -> Result<Self> {
        if let Some(path) = path {
            Self::from_file(&path).await
        } else if Path::new("openfunctions.toml").exists() {
            Self::from_file("openfunctions.toml").await
        } else if Path::new(".openfunctions/config.toml").exists() {
            Self::from_file(".openfunctions/config.toml").await
        } else {
            // No config file anywhere: fall back to built-in defaults
            // rather than failing — a bare directory is still usable.
            Ok(Self::default())
        }
    }

    /// Load configuration from a specified file path.
    ///
    /// Reads the file content, parses it from TOML format, and validates the
    /// configuration. I/O and parse errors are annotated with the offending
    /// path so the user can tell which of the candidate files was at fault.
    async fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
        let path = path.as_ref();
        let content = tokio::fs::read_to_string(path)
            .await
            .with_context(|| format!("failed to read config file: {}", path.display()))?;
        let config: Self = toml::from_str(&content)
            .with_context(|| format!("failed to parse config file: {}", path.display()))?;
        config.validate()?;
        Ok(config)
    }

    /// Validates the loaded configuration to ensure that essential paths exist.
    ///
    /// Checks that every configured tool and agent directory exists on disk,
    /// bailing out with the first missing path.
    fn validate(&self) -> Result<()> {
        for dir in &self.tool_dirs {
            if !dir.exists() {
                anyhow::bail!("Tool directory does not exist: {}", dir.display());
            }
        }

        for dir in &self.agent_dirs {
            if !dir.exists() {
                anyhow::bail!("Agent directory does not exist: {}", dir.display());
            }
        }

        Ok(())
    }
}

impl Default for Config {
    /// Provides a default configuration for the project.
    fn default() -> Self {
        Self {
            project: ProjectConfig::default(),
            runtime: RuntimeConfig::default(),
            #[cfg(feature = "openai")]
            ai: None,
            tool_dirs: vec![PathBuf::from("tools")],
            agent_dirs: vec![PathBuf::from("agents")],
            env: HashMap::new(),
        }
    }
}

/// Project-specific configuration.
///
/// Contains metadata about the project (name, version, optional description)
/// and paths for build artifacts and caching.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectConfig {
    /// The name of the project.
    pub name: String,
    /// The version of the project.
    pub version: String,
    /// An optional description of the project.
    pub description: Option<String>,
    /// The directory for storing cached files.
    pub cache_dir: PathBuf,
    /// The directory where build outputs are stored.
    pub output_dir: PathBuf,
}

impl Default for ProjectConfig {
    /// Provides a default project configuration.
    fn default() -> Self {
        Self {
            name: "openfunctions-project".to_string(),
            version: "0.1.0".to_string(),
            description: None,
            cache_dir: PathBuf::from(".openfunctions/cache"),
            output_dir: PathBuf::from("bin"),
        }
    }
}

/// Runtime configuration for executing tools and agents.
///
/// These settings control the environment in which functions are executed, including
/// resource limits and permissions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RuntimeConfig {
    /// The maximum number of parallel executions allowed.
    pub max_parallel_executions: usize,
    /// The timeout for a single execution, in seconds.
    pub execution_timeout: u64,
    /// The memory limit for an execution, in bytes. `None` means no limit.
    pub memory_limit: Option<u64>,
    /// A list of commands that are allowed to be executed by tools.
    /// NOTE(review): whether an empty list means "allow all" or "allow none"
    /// is decided by the executor, not here — confirm against the runtime.
    pub allowed_commands: Vec<String>,
    /// A list of environment variables to pass through to the execution environment.
    pub env_passthrough: Vec<String>,
}

impl Default for RuntimeConfig {
    /// Provides a default runtime configuration.
    fn default() -> Self {
        Self {
            max_parallel_executions: 4,
            execution_timeout: 300,
            memory_limit: None,
            allowed_commands: vec![],
            env_passthrough: vec!["PATH".to_string(), "HOME".to_string()],
        }
    }
}

/// AI service configuration, specifically for OpenAI models.
///
/// This is only available when the `openai` feature is enabled.
#[cfg(feature = "openai")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AiConfig {
    /// The API key for the OpenAI service. Can be omitted if set in the environment.
    pub api_key: Option<String>,
    /// The model to use for AI completions, e.g., "gpt-4o-mini".
    pub model: String,
    /// The maximum number of tokens to generate in a completion.
    pub max_tokens: u32,
    /// The sampling temperature to use for generation.
    pub temperature: f32,
    /// The model to use for creating embeddings.
    pub embedding_model: String,
}

#[cfg(feature = "openai")]
impl Default for AiConfig {
    /// Defaults targeting OpenAI: `gpt-4o-mini` for completions,
    /// `text-embedding-3-small` for embeddings, 2000 max tokens, and a
    /// temperature of 0.7. No API key is set — it is expected to come
    /// from the environment instead.
    fn default() -> Self {
        AiConfig {
            api_key: None,
            model: String::from("gpt-4o-mini"),
            max_tokens: 2000,
            temperature: 0.7,
            embedding_model: String::from("text-embedding-3-small"),
        }
    }
}