i-self 0.4.3

Personal developer-companion CLI: scans your repos, indexes code semantically, watches your activity, and moves AI-agent sessions between tools (Claude Code, Aider, Goose, OpenAI Codex CLI, Continue.dev, OpenCode).
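
//! Configuration for the `i-self` CLI, stored as TOML at
//! `~/.i-self/config.toml` and created on the first `save()`.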
#![allow(dead_code)]

use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum ConfigError {
    #[error("Failed to read config: {0}")]
    ReadError(#[from] std::io::Error),
    #[error("Failed to parse config: {0}")]
    ParseError(#[from] toml::de::Error),
    #[error("Failed to serialize config: {0}")]
    SerializeError(#[from] toml::ser::Error),
    #[error("Could not find home directory")]
    NoHomeDir,
    #[error("No LLM provider configured. Set one of: openai_api_key, anthropic_api_key, gemini_api_key")]
    NoLlmProvider,
    #[error("Invalid API key for {0}: {1}")]
    InvalidApiKey(String, String),
}

/// LLM Provider configuration
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct LlmConfig {
    pub provider: Option<String>,
    pub model: Option<String>,
    pub openai_api_key: Option<String>,
    pub anthropic_api_key: Option<String>,
    pub gemini_api_key: Option<String>,
    pub litellm_api_key: Option<String>,
    pub litellm_base_url: Option<String>,
    pub max_tokens: Option<u32>,
    pub temperature: Option<f32>,
}

impl LlmConfig {
    pub fn validate(&self) -> Result<(), ConfigError> {
        let has_key = self.openai_api_key.is_some()
            || self.anthropic_api_key.is_some()
            || self.gemini_api_key.is_some()
            || self.litellm_api_key.is_some();

        if !has_key {
            return Err(ConfigError::NoLlmProvider);
        }
        Ok(())
    }

    pub fn provider(&self) -> &str {
        self.provider.as_deref().unwrap_or("openai")
    }

    pub fn model(&self) -> &str {
        self.model.as_deref().unwrap_or("gpt-4o-mini")
    }
}
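
// For illustration only, the `[llm]` table in config.toml might look like
// the sketch below (all values are placeholders). When `provider` or
// `model` is unset, the accessors above fall back to "openai" and
// "gpt-4o-mini".
//
//     [llm]
//     provider = "anthropic"
//     model = "claude-sonnet-4-5"
//     anthropic_api_key = "sk-ant-..."
//     max_tokens = 4096
//     temperature = 0.2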

/// Application configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppConfig {
    pub github_token: Option<String>,
    pub gitlab_token: Option<String>,
    pub bitbucket_token: Option<String>,
    pub scan_depth_days: i64,
    pub max_concurrent_scans: usize,
    pub exclude_patterns: Vec<String>,
    pub include_forks: bool,
    pub include_archived: bool,
    pub auto_refresh: bool,
    pub refresh_interval_hours: u64,
    #[serde(default)]
    pub llm: LlmConfig,
    #[serde(default)]
    pub monitor: MonitorConfig,
    #[serde(default)]
    pub cloud: CloudConfig,
}

/// Monitor configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MonitorConfig {
    pub screenshot_interval: u64,
    pub idle_threshold: u64,
    pub capture_keyboard: bool,
    pub capture_mouse: bool,
}

impl Default for MonitorConfig {
    fn default() -> Self {
        Self {
            screenshot_interval: 5,
            idle_threshold: 300,
            capture_keyboard: true,
            capture_mouse: true,
        }
    }
}

/// Cloud sync configuration. Only S3-compatible backends are supported as
/// of 0.4 (see CHANGELOG); the `provider` field is kept for future use but
/// currently has no effect — `endpoint` is what selects MinIO / R2 / Spaces /
/// B2 vs. AWS S3.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct CloudConfig {
    pub provider: Option<String>,
    pub bucket: Option<String>,
    pub region: Option<String>,
    /// S3 endpoint URL. Leave unset for AWS S3; set to e.g.
    /// `http://localhost:9000` for MinIO,
    /// `https://<account>.r2.cloudflarestorage.com` for Cloudflare R2.
    pub endpoint: Option<String>,
    /// Optional key prefix prepended to every object — useful for sharing
    /// a bucket across hosts / users.
    pub prefix: Option<String>,
    pub access_key: Option<String>,
    pub secret_key: Option<String>,
}
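
// An illustrative `[cloud]` table (placeholder values) for MinIO running
// locally, selected purely by `endpoint` as noted above:
//
//     [cloud]
//     bucket = "i-self-sync"
//     endpoint = "http://localhost:9000"
//     access_key = "minioadmin"
//     secret_key = "minioadmin"
//
// For AWS S3 itself, leave `endpoint` unset and set `region` instead.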

impl Default for AppConfig {
    fn default() -> Self {
        Self {
            github_token: None,
            gitlab_token: None,
            bitbucket_token: None,
            scan_depth_days: 30,
            max_concurrent_scans: 5,
            exclude_patterns: vec![
                "node_modules".to_string(),
                "target".to_string(),
                ".git".to_string(),
                "__pycache__".to_string(),
            ],
            include_forks: false,
            include_archived: false,
            auto_refresh: true,
            refresh_interval_hours: 24,
            llm: LlmConfig::default(),
            monitor: MonitorConfig::default(),
            cloud: CloudConfig::default(),
        }
    }
}

impl AppConfig {
    pub fn load() -> Result<Self, ConfigError> {
        let config_path = Self::config_path()?;
        
        if !config_path.exists() {
            return Ok(Self::default());
        }

        let content = std::fs::read_to_string(&config_path)?;
        let config: Self = toml::from_str(&content)?;
        
        Ok(config)
    }

    pub fn save(&self) -> Result<(), ConfigError> {
        let config_path = Self::config_path()?;
        
        if let Some(parent) = config_path.parent() {
            std::fs::create_dir_all(parent)?;
        }

        let content = toml::to_string_pretty(self)?;
        std::fs::write(&config_path, content)?;
        
        Ok(())
    }

    pub fn validate(&self) -> Result<(), ConfigError> {
        self.llm.validate()?;
        Ok(())
    }

    fn config_path() -> Result<PathBuf, ConfigError> {
        let home = dirs::home_dir().ok_or(ConfigError::NoHomeDir)?;
        Ok(home.join(".i-self").join("config.toml"))
    }
}
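
// A minimal usage sketch (hypothetical call site, not part of the CLI
// entry point): load the on-disk config or fall back to defaults, pull a
// key from the environment if none is configured, validate, and persist.
fn example_load_validate_save() -> Result<(), ConfigError> {
    let mut config = AppConfig::load()?;
    if config.llm.openai_api_key.is_none() {
        config.llm.openai_api_key = std::env::var("OPENAI_API_KEY").ok();
    }
    config.validate()?;
    config.save()?;
    Ok(())
}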

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_default_config() {
        let config = AppConfig::default();
        assert_eq!(config.scan_depth_days, 30);
        assert_eq!(config.max_concurrent_scans, 5);
        assert!(!config.include_forks);
        assert!(config.auto_refresh);
    }

    #[test]
    fn test_llm_config_defaults() {
        let llm = LlmConfig::default();
        assert_eq!(llm.provider(), "openai");
        assert_eq!(llm.model(), "gpt-4o-mini");
    }

    #[test]
    fn test_llm_config_validation_no_keys() {
        let llm = LlmConfig::default();
        let result = llm.validate();
        assert!(result.is_err());
    }

    #[test]
    fn test_llm_config_validation_with_openai_key() {
        let llm = LlmConfig {
            openai_api_key: Some("sk-test".to_string()),
            ..Default::default()
        };
        assert!(llm.validate().is_ok());
    }

    #[test]
    fn test_llm_config_validation_with_anthropic_key() {
        let llm = LlmConfig {
            anthropic_api_key: Some("sk-ant-test".to_string()),
            ..Default::default()
        };
        assert!(llm.validate().is_ok());
    }

    #[test]
    fn test_llm_config_validation_with_gemini_key() {
        let llm = LlmConfig {
            gemini_api_key: Some("AIza-test".to_string()),
            ..Default::default()
        };
        assert!(llm.validate().is_ok());
    }

    #[test]
    fn test_monitor_config_defaults() {
        let monitor = MonitorConfig::default();
        assert_eq!(monitor.screenshot_interval, 5);
        assert_eq!(monitor.idle_threshold, 300);
        assert!(monitor.capture_keyboard);
        assert!(monitor.capture_mouse);
    }

    #[test]
    fn test_cloud_config_defaults() {
        let cloud = CloudConfig::default();
        assert!(cloud.provider.is_none());
        assert!(cloud.bucket.is_none());
    }

    #[test]
    fn test_config_serialization() {
        let config = AppConfig::default();
        let serialized = toml::to_string(&config).unwrap();
        let deserialized: AppConfig = toml::from_str(&serialized).unwrap();
        assert_eq!(deserialized.scan_depth_days, 30);
    }

    #[test]
    fn test_config_with_llm() {
        let config = AppConfig {
            llm: LlmConfig {
                provider: Some("openai".to_string()),
                model: Some("gpt-4".to_string()),
                openai_api_key: Some("sk-test".to_string()),
                ..Default::default()
            },
            ..Default::default()
        };
        assert!(config.validate().is_ok());
    }

    #[test]
    fn test_config_path_contains_app_dir() {
        // config_path() needs a resolvable home directory, which may be
        // missing in minimal CI environments, so only assert on success.
        if let Ok(path) = AppConfig::config_path() {
            assert!(path.to_string_lossy().contains(".i-self"));
        }
    }
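
    // A hedged sketch of a hand-written ~/.i-self/config.toml, with
    // placeholder values throughout. The scalar fields carry no
    // #[serde(default)], so they must all be present; Option fields and
    // the [llm], [monitor], and [cloud] tables may be omitted.
    #[test]
    fn test_parse_handwritten_toml() {
        let raw = r#"
            scan_depth_days = 14
            max_concurrent_scans = 2
            exclude_patterns = ["node_modules", "target"]
            include_forks = false
            include_archived = false
            auto_refresh = true
            refresh_interval_hours = 12

            [llm]
            provider = "anthropic"
            anthropic_api_key = "sk-ant-test"

            [cloud]
            bucket = "i-self-sync"
            endpoint = "http://localhost:9000"
        "#;

        let config: AppConfig = toml::from_str(raw).unwrap();
        assert_eq!(config.scan_depth_days, 14);
        assert_eq!(config.llm.provider(), "anthropic");
        assert_eq!(config.cloud.endpoint.as_deref(), Some("http://localhost:9000"));
        // The [monitor] table is omitted above, so defaults apply.
        assert_eq!(config.monitor.idle_threshold, 300);
        assert!(config.validate().is_ok());
    }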
}