#![allow(dead_code)]
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum ConfigError {
#[error("Failed to read config: {0}")]
ReadError(#[from] std::io::Error),
#[error("Failed to parse config: {0}")]
ParseError(#[from] toml::de::Error),
#[error("Failed to serialize config: {0}")]
SerializeError(#[from] toml::ser::Error),
#[error("Could not find home directory")]
NoHomeDir,
#[error("No LLM provider configured. Set one of: openai_api_key, anthropic_api_key, gemini_api_key")]
NoLlmProvider,
#[error("Invalid API key for {0}: {1}")]
InvalidApiKey(String, String),
}
/// Configuration for the LLM backend: provider selection, model choice,
/// per-provider API keys, and generation parameters.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct LlmConfig {
    /// Provider id; `provider()` falls back to "openai" when unset.
    pub provider: Option<String>,
    /// Model name; `model()` falls back to "gpt-4o-mini" when unset.
    pub model: Option<String>,
    pub openai_api_key: Option<String>,
    pub anthropic_api_key: Option<String>,
    pub gemini_api_key: Option<String>,
    pub litellm_api_key: Option<String>,
    /// Base URL for a LiteLLM proxy — presumably required alongside
    /// `litellm_api_key`; not checked by `validate()`. TODO confirm.
    pub litellm_base_url: Option<String>,
    // Generation limits; units/ranges are provider-defined and not
    // validated here.
    pub max_tokens: Option<u32>,
    pub temperature: Option<f32>,
}
impl LlmConfig {
    /// Checks that at least one provider API key is present.
    ///
    /// # Errors
    /// Returns [`ConfigError::NoLlmProvider`] when none of the supported
    /// provider keys (OpenAI, Anthropic, Gemini, LiteLLM) is set.
    pub fn validate(&self) -> Result<(), ConfigError> {
        let any_key = [
            &self.openai_api_key,
            &self.anthropic_api_key,
            &self.gemini_api_key,
            &self.litellm_api_key,
        ]
        .into_iter()
        .any(Option::is_some);
        if any_key {
            Ok(())
        } else {
            Err(ConfigError::NoLlmProvider)
        }
    }

    /// Configured provider name, or `"openai"` when unset.
    pub fn provider(&self) -> &str {
        match self.provider.as_deref() {
            Some(name) => name,
            None => "openai",
        }
    }

    /// Configured model name, or `"gpt-4o-mini"` when unset.
    pub fn model(&self) -> &str {
        match self.model.as_deref() {
            Some(name) => name,
            None => "gpt-4o-mini",
        }
    }
}
/// Top-level application configuration, persisted as TOML under the
/// user's home directory (see `AppConfig::config_path`).
///
/// The container-level `#[serde(default)]` lets a *partial* config file
/// parse: any field missing from the TOML falls back to its value from
/// `AppConfig::default()`. Previously only `monitor` and `cloud` were
/// optional, so a minimal config (e.g. just `github_token`) failed to
/// parse even though `load()` already defaults when the file is absent.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct AppConfig {
    // Per-forge access tokens; all optional.
    pub github_token: Option<String>,
    pub gitlab_token: Option<String>,
    pub bitbucket_token: Option<String>,
    /// How far back to scan, in days (default 30).
    pub scan_depth_days: i64,
    /// Upper bound on concurrent scans (default 5).
    pub max_concurrent_scans: usize,
    /// Directory/path patterns to skip while scanning.
    pub exclude_patterns: Vec<String>,
    pub include_forks: bool,
    pub include_archived: bool,
    /// Whether to refresh automatically every `refresh_interval_hours`.
    pub auto_refresh: bool,
    pub refresh_interval_hours: u64,
    pub llm: LlmConfig,
    pub monitor: MonitorConfig,
    pub cloud: CloudConfig,
}
/// Settings for the activity monitor.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MonitorConfig {
    /// Interval between screenshots — presumably seconds (default 5);
    /// units not stated here, confirm against the capture loop.
    pub screenshot_interval: u64,
    /// Inactivity threshold — presumably seconds (default 300). TODO confirm.
    pub idle_threshold: u64,
    /// Whether keyboard activity is captured (default true).
    pub capture_keyboard: bool,
    /// Whether mouse activity is captured (default true).
    pub capture_mouse: bool,
}
impl Default for MonitorConfig {
fn default() -> Self {
Self {
screenshot_interval: 5,
idle_threshold: 300,
capture_keyboard: true,
capture_mouse: true,
}
}
}
/// Cloud storage settings — field names suggest an S3-compatible object
/// store (bucket/region/endpoint/keys), but the uploader is not visible
/// here; confirm against the consumer. All fields optional; defaults to
/// everything unset (cloud sync effectively unconfigured).
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct CloudConfig {
    pub provider: Option<String>,
    pub bucket: Option<String>,
    pub region: Option<String>,
    /// Custom endpoint URL, presumably for non-AWS S3-compatible hosts.
    pub endpoint: Option<String>,
    /// Key prefix prepended to uploaded objects — TODO confirm.
    pub prefix: Option<String>,
    pub access_key: Option<String>,
    pub secret_key: Option<String>,
}
impl Default for AppConfig {
fn default() -> Self {
Self {
github_token: None,
gitlab_token: None,
bitbucket_token: None,
scan_depth_days: 30,
max_concurrent_scans: 5,
exclude_patterns: vec![
"node_modules".to_string(),
"target".to_string(),
".git".to_string(),
"__pycache__".to_string(),
],
include_forks: false,
include_archived: false,
auto_refresh: true,
refresh_interval_hours: 24,
llm: LlmConfig::default(),
monitor: MonitorConfig::default(),
cloud: CloudConfig::default(),
}
}
}
impl AppConfig {
    /// Loads the config from disk, falling back to [`AppConfig::default`]
    /// when no config file exists yet.
    ///
    /// # Errors
    /// Fails if the home directory is unknown, the file cannot be read,
    /// or its contents are not valid TOML.
    pub fn load() -> Result<Self, ConfigError> {
        let path = Self::config_path()?;
        if path.exists() {
            let raw = std::fs::read_to_string(&path)?;
            Ok(toml::from_str(&raw)?)
        } else {
            Ok(Self::default())
        }
    }

    /// Serializes the config as pretty TOML and writes it to the config
    /// path, creating parent directories as needed.
    ///
    /// # Errors
    /// Fails if the home directory is unknown, serialization fails, or
    /// the file/directories cannot be written.
    pub fn save(&self) -> Result<(), ConfigError> {
        let path = Self::config_path()?;
        if let Some(dir) = path.parent() {
            std::fs::create_dir_all(dir)?;
        }
        std::fs::write(&path, toml::to_string_pretty(self)?)?;
        Ok(())
    }

    /// Validates the configuration; currently delegates to the LLM
    /// section only.
    pub fn validate(&self) -> Result<(), ConfigError> {
        self.llm.validate()
    }

    /// Path of the config file: `<home>/.i-self/config.toml`.
    fn config_path() -> Result<PathBuf, ConfigError> {
        match dirs::home_dir() {
            Some(home) => Ok(home.join(".i-self").join("config.toml")),
            None => Err(ConfigError::NoHomeDir),
        }
    }
}
#[cfg(test)]
mod tests {
    // NOTE: the previous `use std::io::Write;` and `use tempfile::TempDir;`
    // were unused anywhere in this module (compiler warnings, and a
    // needless dev-dependency), so they have been removed.
    use super::*;

    #[test]
    fn test_default_config() {
        let config = AppConfig::default();
        assert_eq!(config.scan_depth_days, 30);
        assert_eq!(config.max_concurrent_scans, 5);
        assert!(!config.include_forks);
        assert!(config.auto_refresh);
    }

    #[test]
    fn test_llm_config_defaults() {
        let llm = LlmConfig::default();
        assert_eq!(llm.provider(), "openai");
        assert_eq!(llm.model(), "gpt-4o-mini");
    }

    #[test]
    fn test_llm_config_validation_no_keys() {
        // With no API keys at all, validation must fail.
        assert!(LlmConfig::default().validate().is_err());
    }

    #[test]
    fn test_llm_config_validation_with_openai_key() {
        let llm = LlmConfig {
            openai_api_key: Some("sk-test".to_string()),
            ..Default::default()
        };
        assert!(llm.validate().is_ok());
    }

    #[test]
    fn test_llm_config_validation_with_anthropic_key() {
        let llm = LlmConfig {
            anthropic_api_key: Some("sk-ant-test".to_string()),
            ..Default::default()
        };
        assert!(llm.validate().is_ok());
    }

    #[test]
    fn test_llm_config_validation_with_gemini_key() {
        let llm = LlmConfig {
            gemini_api_key: Some("AIza-test".to_string()),
            ..Default::default()
        };
        assert!(llm.validate().is_ok());
    }

    #[test]
    fn test_monitor_config_defaults() {
        let monitor = MonitorConfig::default();
        assert_eq!(monitor.screenshot_interval, 5);
        assert_eq!(monitor.idle_threshold, 300);
        assert!(monitor.capture_keyboard);
        assert!(monitor.capture_mouse);
    }

    #[test]
    fn test_cloud_config_defaults() {
        let cloud = CloudConfig::default();
        assert!(cloud.provider.is_none());
        assert!(cloud.bucket.is_none());
    }

    #[test]
    fn test_config_serialization() {
        // Round-trip through TOML must preserve values.
        let config = AppConfig::default();
        let serialized = toml::to_string(&config).unwrap();
        let deserialized: AppConfig = toml::from_str(&serialized).unwrap();
        assert_eq!(deserialized.scan_depth_days, 30);
    }

    #[test]
    fn test_config_with_llm() {
        let mut config = AppConfig::default();
        config.llm.openai_api_key = Some("sk-test".to_string());
        config.llm.provider = Some("openai".to_string());
        config.llm.model = Some("gpt-4".to_string());
        assert!(config.validate().is_ok());
    }

    #[test]
    fn test_config_path_requires_home() {
        // On systems with a resolvable home directory the path must land
        // inside `.i-self`; on systems without one, erroring is accepted.
        if let Ok(path) = AppConfig::config_path() {
            assert!(path.to_string_lossy().contains(".i-self"));
        }
    }
}