use crate::{ConfigError, GgenConfig, Result};
use std::fs;
use std::path::{Path, PathBuf};

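/// Loads `ggen.toml` configuration files and parses them into a [`GgenConfig`].
///
/// Typical usage (illustrative sketch only; the exact import path depends on how
/// the crate re-exports this type):
///
/// ```ignore
/// let loader = ConfigLoader::new("ggen.toml")?;
/// let config = loader.load()?;
/// println!("project: {}", config.project.name);
/// ```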
pub struct ConfigLoader {
    path: PathBuf,
}

impl ConfigLoader {
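    /// Creates a loader for the configuration file at `path`.
    ///
    /// Returns [`ConfigError::FileNotFound`] if the path does not exist.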
    pub fn new<P: AsRef<Path>>(path: P) -> Result<Self> {
        let path = path.as_ref().to_path_buf();
        if !path.exists() {
            return Err(ConfigError::FileNotFound(path));
        }
        Ok(Self { path })
    }

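    /// Builds a loader for `path` and immediately loads the configuration.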
    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<GgenConfig> {
        let loader = Self::new(path)?;
        loader.load()
    }

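    /// Parses a configuration directly from a TOML string.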
    pub fn from_str(content: &str) -> Result<GgenConfig> {
        let config: GgenConfig = toml::from_str(content)?;
        Ok(config)
    }

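    /// Reads the loader's file from disk and parses its contents as TOML.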
    pub fn load(&self) -> Result<GgenConfig> {
        let content = fs::read_to_string(&self.path)?;
        Self::from_str(&content)
    }

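    /// Finds the nearest `ggen.toml` via [`Self::find_config_file`] and loads it.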
    pub fn find_and_load() -> Result<GgenConfig> {
        let path = Self::find_config_file()?;
        Self::from_file(path)
    }

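    /// Searches for a `ggen.toml` file starting in the current working directory
    /// and walking up through parent directories.
    ///
    /// Returns [`ConfigError::FileNotFound`] if the filesystem root is reached
    /// without finding one.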
    pub fn find_config_file() -> Result<PathBuf> {
        let mut current = std::env::current_dir().map_err(ConfigError::Io)?;

        loop {
            let candidate = current.join("ggen.toml");
            if candidate.exists() {
                return Ok(candidate);
            }

            if !current.pop() {
                return Err(ConfigError::FileNotFound(PathBuf::from(
                    "ggen.toml (searched all parent directories)",
                )));
            }
        }
    }

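    /// Loads the configuration, then applies any overrides registered under
    /// `env` for the given `environment` name. Override keys are dotted paths
    /// such as `ai.model` or `logging.level` (see `apply_single_override`).
    ///
    /// A hypothetical override section might look like the following; the exact
    /// on-disk shape is defined by [`GgenConfig`]:
    ///
    /// ```toml
    /// [env.production]
    /// "ai.model" = "gpt-4"
    /// "logging.level" = "warn"
    /// ```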
    pub fn load_with_env(&self, environment: &str) -> Result<GgenConfig> {
        let mut config = self.load()?;

        if let Some(env_overrides) = config.env.clone() {
            if let Some(overrides) = env_overrides.get(environment) {
                apply_env_overrides(&mut config, overrides)?;
            }
        }

        Ok(config)
    }

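    /// Returns the path of the configuration file backing this loader.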
    #[must_use]
    pub fn path(&self) -> &Path {
        &self.path
    }
}

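/// Applies each key/value pair of a JSON override object to `config`.
/// Values that are not objects are ignored.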
fn apply_env_overrides(config: &mut GgenConfig, overrides: &serde_json::Value) -> Result<()> {
    if let Some(obj) = overrides.as_object() {
        for (key, value) in obj {
            apply_single_override(config, key, value)?;
        }
    }
    Ok(())
}

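/// Applies a single override, where `key` is a dotted path such as `ai.model`,
/// `logging.level`, or `security.require_confirmation`. Unrecognized keys are
/// ignored.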
fn apply_single_override(
    config: &mut GgenConfig, key: &str, value: &serde_json::Value,
) -> Result<()> {
    let parts: Vec<&str> = key.split('.').collect();

    match parts.as_slice() {
        ["ai", field] => {
            if let Some(ai_config) = config.ai.as_mut() {
                update_ai_field(ai_config, field, value)?;
            }
        }
        ["logging", "level"] => {
            if let Some(logging) = config.logging.as_mut() {
                if let Some(s) = value.as_str() {
                    logging.level = s.to_string();
                }
            }
        }
        ["security", field] => {
            if let Some(security) = config.security.as_mut() {
                update_security_field(security, field, value)?;
            }
        }
        _ => {}
    }

    Ok(())
}

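/// Updates one field of the AI configuration. Unknown fields and values of the
/// wrong type are ignored.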
fn update_ai_field(
    ai: &mut crate::schema::AiConfig, field: &str, value: &serde_json::Value,
) -> Result<()> {
    match field {
        "model" => {
            if let Some(s) = value.as_str() {
                ai.model = s.to_string();
            }
        }
        "temperature" => {
            if let Some(f) = value.as_f64() {
                ai.temperature = f as f32;
            }
        }
        "max_tokens" => {
            if let Some(n) = value.as_u64() {
                ai.max_tokens = n as u32;
            }
        }
        _ => {}
    }
    Ok(())
}

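/// Updates one boolean field of the security configuration. Unknown fields and
/// non-boolean values are ignored.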
fn update_security_field(
    security: &mut crate::schema::SecurityConfig, field: &str, value: &serde_json::Value,
) -> Result<()> {
    match field {
        "require_confirmation" => {
            if let Some(b) = value.as_bool() {
                security.require_confirmation = b;
            }
        }
        "audit_operations" => {
            if let Some(b) = value.as_bool() {
                security.audit_operations = b;
            }
        }
        _ => {}
    }
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_minimal_config() {
        let toml = r#"
            [project]
            name = "test-project"
            version = "1.0.0"
        "#;

        let config = ConfigLoader::from_str(toml).unwrap();
        assert_eq!(config.project.name, "test-project");
        assert_eq!(config.project.version, "1.0.0");
        assert!(config.ai.is_none());
    }

    #[test]
    fn test_parse_full_config() {
        let toml = r#"
            [project]
            name = "full-project"
            version = "2.0.0"
            description = "A test project"

            [ai]
            provider = "openai"
            model = "gpt-4"
            temperature = 0.8
            max_tokens = 3000

            [templates]
            directory = "templates"
            output_directory = "generated"
            backup_enabled = true
        "#;

        let config = ConfigLoader::from_str(toml).unwrap();
        assert_eq!(config.project.name, "full-project");

        let ai = config.ai.as_ref().unwrap();
        assert_eq!(ai.provider, "openai");
        assert_eq!(ai.model, "gpt-4");
        assert!((ai.temperature - 0.8).abs() < f32::EPSILON);

        let templates = config.templates.as_ref().unwrap();
        assert_eq!(templates.directory.as_ref().unwrap(), "templates");
        assert!(templates.backup_enabled);
    }

    #[test]
    fn test_default_values() {
        let toml = r#"
            [project]
            name = "defaults"
            version = "1.0.0"

            [ai]
            provider = "ollama"
            model = "llama2"
        "#;

        let config = ConfigLoader::from_str(toml).unwrap();
        let ai = config.ai.as_ref().unwrap();

        assert!((ai.temperature - 0.7).abs() < f32::EPSILON);
        assert_eq!(ai.max_tokens, 2000);
        assert_eq!(ai.timeout, 30);
    }
}