use std::io::IsTerminal;

use anyhow::Result;
use serde::{Deserialize, Serialize};
/// Top-level application configuration, combining search and output settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
/// Options controlling file traversal and matching.
pub search: SearchConfig,
/// Options controlling how results are rendered.
pub output: OutputConfig,
/// AI integration settings; field exists only when the "ai" feature is
/// compiled in, and stays `None` until AI flags are requested (see `from_cli`).
#[cfg(feature = "ai")]
pub ai: Option<AiConfig>,
}
/// Options controlling file traversal and match behavior.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchConfig {
/// Match case-insensitively when true.
pub ignore_case: bool,
/// Treat the pattern as a regular expression rather than a literal.
pub regex: bool,
/// Follow symbolic links during directory traversal.
pub follow_symlinks: bool,
/// Maximum directory depth to descend; `None` means unlimited.
pub max_depth: Option<usize>,
/// Skip files larger than this many bytes; `None` disables the limit.
/// Defaults to 50 MiB (see the `Default` impl).
pub max_file_size: Option<u64>,
/// Number of worker threads; must be > 0 (enforced by `Config::validate`).
pub threads: usize,
/// Include hidden files and directories when true.
pub hidden: bool,
/// Honor .gitignore rules when true (disabled by the CLI's `no_ignore` flag).
pub respect_gitignore: bool,
/// Search binary files as well as text when true.
pub include_binary: bool,
/// Cap on matches per file; `None` presumably means no cap — confirm
/// against the search implementation.
pub max_matches_per_file: Option<usize>,
}
/// Options controlling how search results are rendered.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OutputConfig {
/// Emit colored output when true; default is auto-detected from whether
/// stdout is a terminal (see the `Default` impl).
pub color: bool,
/// Prefix matches with line numbers when true.
pub line_numbers: bool,
/// Lines of context shown before each match; <= 1000 (see `Config::validate`).
pub before_context: usize,
/// Lines of context shown after each match; <= 1000 (see `Config::validate`).
pub after_context: usize,
/// Show surrounding AST context for matches when true.
pub show_ast_context: bool,
/// Maximum AST depth to display; <= 20 (see `Config::validate`).
pub max_ast_depth: usize,
/// Highlight the matched span when true.
pub highlight: bool,
/// Print a header line per file when true.
pub show_file_headers: bool,
/// Print a per-file match count when true.
pub show_match_count: bool,
}
/// Settings for the optional OpenAI-backed AI features (feature "ai").
#[cfg(feature = "ai")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AiConfig {
/// OpenAI API key; populated from the `OPENAI_API_KEY` environment
/// variable by `Config::from_cli`. Must be non-empty when either AI
/// feature is enabled (see `Config::validate`).
pub api_key: String,
/// Model identifier sent to the API (default "gpt-4o-mini").
pub model: String,
/// Enable the AI insights feature.
pub enable_insights: bool,
/// Enable the AI explanation feature.
pub enable_explanation: bool,
/// Token budget per request; must be in 1..=4096 (see `Config::validate`).
pub max_tokens: u32,
/// Sampling temperature; must be in 0.0..=2.0 (see `Config::validate`).
pub temperature: f32,
}
impl Default for Config {
    /// Builds a configuration from the default search and output settings;
    /// with the "ai" feature compiled in, no AI configuration is attached.
    fn default() -> Self {
        let search = SearchConfig::default();
        let output = OutputConfig::default();
        Self {
            search,
            output,
            #[cfg(feature = "ai")]
            ai: None,
        }
    }
}
impl Default for SearchConfig {
fn default() -> Self {
Self {
ignore_case: false,
regex: false,
follow_symlinks: false,
max_depth: None,
max_file_size: Some(50 * 1024 * 1024), threads: num_cpus::get(),
hidden: false,
respect_gitignore: true,
include_binary: false,
max_matches_per_file: Some(1000),
}
}
}
impl Default for OutputConfig {
fn default() -> Self {
Self {
color: atty::is(atty::Stream::Stdout),
line_numbers: true,
before_context: 2,
after_context: 2,
show_ast_context: false,
max_ast_depth: 3,
highlight: true,
show_file_headers: true,
show_match_count: false,
}
}
}
#[cfg(feature = "ai")]
impl Default for AiConfig {
    /// AI defaults: both features off, no API key yet, gpt-4o-mini with a
    /// modest token budget and low sampling temperature.
    fn default() -> Self {
        AiConfig {
            api_key: String::default(),
            model: String::from("gpt-4o-mini"),
            enable_insights: false,
            enable_explanation: false,
            max_tokens: 1000,
            temperature: 0.3,
        }
    }
}
impl Config {
pub fn from_cli(cli: &crate::cli::Cli) -> Result<Self> {
let mut config = Self::default();
config.search.ignore_case = cli.ignore_case;
config.search.regex = cli.regex;
config.search.follow_symlinks = cli.follow_symlinks;
config.search.max_depth = cli.max_depth;
config.search.max_file_size = cli.max_file_size;
config.search.threads = cli.threads.unwrap_or_else(num_cpus::get);
config.search.hidden = cli.hidden;
config.search.respect_gitignore = !cli.no_ignore;
config.output.color = cli.color.unwrap_or_else(|| atty::is(atty::Stream::Stdout));
config.output.line_numbers = cli.line_numbers;
config.output.before_context = cli.before_context;
config.output.after_context = cli.after_context;
config.output.show_ast_context = cli.ast_context;
config.output.max_ast_depth = cli.max_ast_depth;
#[cfg(feature = "ai")]
if cli.ai_insights || cli.ai_explain {
config.ai = Some(AiConfig {
api_key: std::env::var("OPENAI_API_KEY")
.map_err(|_| anyhow::anyhow!("OPENAI_API_KEY environment variable required for AI features"))?,
model: cli.ai_model.clone(),
enable_insights: cli.ai_insights,
enable_explanation: cli.ai_explain,
max_tokens: 1000,
temperature: 0.3,
});
}
Ok(config)
}
pub fn from_file(path: &std::path::Path) -> Result<Self> {
let content = std::fs::read_to_string(path)?;
let config = if path.extension().and_then(|s| s.to_str()) == Some("toml") {
toml::from_str(&content)?
} else {
serde_yaml::from_str(&content)?
};
Ok(config)
}
pub fn save_to_file(&self, path: &std::path::Path) -> Result<()> {
let content = if path.extension().and_then(|s| s.to_str()) == Some("toml") {
toml::to_string_pretty(self)?
} else {
serde_yaml::to_string(self)?
};
std::fs::write(path, content)?;
Ok(())
}
pub fn validate(&self) -> Result<()> {
if self.search.threads == 0 {
anyhow::bail!("Thread count must be greater than 0");
}
if self.output.before_context > 1000 || self.output.after_context > 1000 {
anyhow::bail!("Context lines must be <= 1000");
}
if self.output.max_ast_depth > 20 {
anyhow::bail!("AST depth must be <= 20");
}
#[cfg(feature = "ai")]
if let Some(ai_config) = &self.ai {
if ai_config.api_key.is_empty() && (ai_config.enable_insights || ai_config.enable_explanation) {
anyhow::bail!("OpenAI API key required for AI features");
}
if ai_config.max_tokens == 0 || ai_config.max_tokens > 4096 {
anyhow::bail!("AI max tokens must be between 1 and 4096");
}
if !(0.0..=2.0).contains(&ai_config.temperature) {
anyhow::bail!("AI temperature must be between 0.0 and 2.0");
}
}
Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Defaults are conservative: literal, case-sensitive search that honors
    /// .gitignore, with line numbers enabled in the output.
    #[test]
    fn test_default_config() {
        let cfg = Config::default();
        assert!(!cfg.search.ignore_case);
        assert!(!cfg.search.regex);
        assert!(cfg.search.respect_gitignore);
        assert!(cfg.output.line_numbers);
    }

    /// `validate` accepts the defaults and rejects out-of-range settings.
    #[test]
    fn test_config_validation() {
        let mut cfg = Config::default();
        assert!(cfg.validate().is_ok());

        // A zero-sized thread pool is rejected.
        cfg.search.threads = 0;
        assert!(cfg.validate().is_err());
        cfg.search.threads = 1;

        // Context beyond 1000 lines is rejected.
        cfg.output.before_context = 2000;
        assert!(cfg.validate().is_err());
    }

    /// A config survives a YAML round trip with key fields intact.
    #[test]
    fn test_config_serialization() {
        let original = Config::default();
        let encoded = serde_yaml::to_string(&original).unwrap();
        let restored: Config = serde_yaml::from_str(&encoded).unwrap();
        assert_eq!(original.search.threads, restored.search.threads);
        assert_eq!(original.output.line_numbers, restored.output.line_numbers);
    }
}