1mod builder;
2mod error;
3mod types;
4
5pub use builder::ConfigBuilder;
6pub use error::{ConfigError, Result};
7pub use types::{Config, DEFAULT_API_BASE, DEFAULT_MODEL, DEFAULT_PATH};
8
9use config::{Config as ConfigRs, Environment, File, FileFormat};
10use std::path::{Path, PathBuf};
11
12impl Config {
13 pub fn builder() -> ConfigBuilder {
14 ConfigBuilder::new()
15 }
16
17 pub fn load(git_root: &Path) -> Result<Self> {
18 let config_path = git_root.join(".rune.yml");
19
20 let mut builder = ConfigRs::builder()
21 .set_default("llm.model", DEFAULT_MODEL)
22 .map_err(|e| ConfigError::LoadError {
23 path: config_path.clone(),
24 message: e.to_string(),
25 })?
26 .set_default("parse.path", DEFAULT_PATH)
27 .map_err(|e| ConfigError::LoadError {
28 path: config_path.clone(),
29 message: e.to_string(),
30 })?;
31
32 if config_path.exists() {
33 builder = builder.add_source(File::new(".rune", FileFormat::Yaml));
34 }
35
36 builder = builder
38 .add_source(
39 Environment::with_prefix("RUNE")
40 .separator("_")
41 .try_parsing(true),
42 )
43 .add_source(Environment::with_prefix("OPENAI_API_KEY").separator("_"))
44 .add_source(Environment::with_prefix("LLM_API_KEY").separator("_"))
45 .add_source(Environment::with_prefix("LLM_API_BASE").separator("_"));
46
47 let config: Self = builder
48 .build()
49 .map_err(|e| ConfigError::LoadError {
50 path: config_path.clone(),
51 message: e.to_string(),
52 })?
53 .try_deserialize()
54 .map_err(|e| ConfigError::Parse(e.to_string()))?;
55
56 config.validate()?;
57
58 Ok(config)
59 }
60
61 pub fn save(&self, path: &Path) -> Result<()> {
62 let template = include_str!("../../templates/default.yml.template");
63
64 let config_content = template
65 .replace("{model}", &self.llm.model)
66 .replace("{path}", &self.parse.path);
67
68 std::fs::write(path, config_content).map_err(|e| ConfigError::SaveError {
69 path: path.to_owned(),
70 message: e.to_string(),
71 })?;
72
73 Ok(())
74 }
75
76 fn validate(&self) -> Result<()> {
77 if self.llm.model.trim().is_empty() {
78 return Err(ConfigError::ValidationError(
79 "Model name cannot be empty".into(),
80 ));
81 }
82
83 if !PathBuf::from(&self.parse.path).exists() {
84 return Err(ConfigError::ValidationError(format!(
85 "Path '{}' does not exist",
86 self.parse.path
87 )));
88 }
89
90 Ok(())
91 }
92
93 pub fn get_api_key(&self) -> Result<String> {
94 self.llm
95 .api_key
96 .clone()
97 .or_else(|| std::env::var("OPENAI_API_KEY").ok())
98 .or_else(|| std::env::var("LLM_API_KEY").ok())
99 .ok_or(ConfigError::MissingApiKey)
100 }
101
102 pub fn get_api_base(&self) -> String {
103 self.llm
104 .api_base
105 .clone()
106 .or_else(|| std::env::var("LLM_API_BASE").ok())
107 .unwrap_or_else(|| DEFAULT_API_BASE.to_string())
108 }
109
110 pub fn get_model(&self) -> &str {
111 &self.llm.model
112 }
113
114 pub fn get_commit_exclude_patterns(&self) -> Option<String> {
115 self.commit.exclude.clone()
116 }
117}
118
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    /// Builder fields should land in the corresponding config sections.
    #[test]
    fn test_builder() {
        let built = Config::builder()
            .api_key("test-key")
            .model("gpt-4")
            .path("./src")
            .build();

        assert_eq!(built.llm.api_key.as_deref(), Some("test-key"));
        assert_eq!(built.llm.model, "gpt-4");
        assert_eq!(built.parse.path, "./src");
    }

    /// `Config::default()` must agree with the exported default constants.
    #[test]
    fn test_defaults() {
        let cfg = Config::default();
        assert_eq!(cfg.llm.model, DEFAULT_MODEL);
        assert_eq!(cfg.parse.path, DEFAULT_PATH);
    }

    /// An empty model name must be rejected even when the path is valid.
    #[test]
    fn test_validation() {
        let dir = TempDir::new().unwrap();
        let cfg = Config::builder()
            .model("")
            .path(dir.path().to_str().unwrap())
            .build();

        let outcome = cfg.validate();
        assert!(matches!(outcome, Err(ConfigError::ValidationError(_))));
    }
}