use anyhow::{Context, Result};
use git2::Repository;
use std::collections::HashMap;
use std::fs;
use std::path::Path;

use crate::git_helper::get_all_commits_by_path;
use crate::models::{Change, Feature, Stats};
use crate::readme_parser::read_readme_info;

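/// Returns `true` if the directory itself is a documentation directory
/// (`docs`, `__docs__`, or `.docs`, matched case-insensitively).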
fn is_documentation_directory(dir_path: &Path) -> bool {
    let dir_name = dir_path
        .file_name()
        .and_then(|name| name.to_str())
        .unwrap_or("");

    let doc_dirs = ["docs", "__docs__", ".docs"];

    doc_dirs.contains(&dir_name.to_lowercase().as_str())
}

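/// Returns `true` if any ancestor of `dir_path` is a documentation directory.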
fn is_inside_documentation_directory(dir_path: &Path) -> bool {
    for ancestor in dir_path.ancestors().skip(1) {
        if is_documentation_directory(ancestor) {
            return true;
        }
    }
    false
}

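/// Returns `true` if `dir_path` is a direct child of a directory named `features`.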
fn is_direct_subfolder_of_features(dir_path: &Path) -> bool {
    if let Some(parent) = dir_path.parent()
        && let Some(parent_name) = parent.file_name().and_then(|name| name.to_str())
    {
        return parent_name == "features";
    }
    false
}

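/// Looks for a `README.md` or `README.mdx` file directly inside `dir_path`.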
fn find_readme_file(dir_path: &Path) -> Option<std::path::PathBuf> {
    let readme_candidates = ["README.md", "README.mdx"];

    for candidate in &readme_candidates {
        let readme_path = dir_path.join(candidate);
        if readme_path.exists() {
            return Some(readme_path);
        }
    }

    None
}

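/// Returns `true` if the directory's README declares `feature: true` in its YAML front matter.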
fn has_feature_flag_in_readme(dir_path: &Path) -> bool {
    if let Some(readme_path) = find_readme_file(dir_path)
        && let Ok(content) = fs::read_to_string(&readme_path)
    {
        // The flag lives in the README's YAML front matter, delimited by `---` lines.
        if let Some(stripped) = content.strip_prefix("---\n")
            && let Some(end_pos) = stripped.find("\n---\n")
        {
            let yaml_content = &stripped[..end_pos];

            if let Ok(yaml_value) = serde_yaml::from_str::<serde_yaml::Value>(yaml_content)
                && let Some(mapping) = yaml_value.as_mapping()
            {
                if let Some(feature_value) =
                    mapping.get(serde_yaml::Value::String("feature".to_string()))
                {
                    return feature_value.as_bool() == Some(true);
                }
            }
        }
    }
    false
}

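/// A directory is a feature if it is a direct child of a `features` directory or its README
/// sets `feature: true`; documentation directories (and anything inside them) never are.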
fn is_feature_directory(dir_path: &Path) -> bool {
    if is_documentation_directory(dir_path) || is_inside_documentation_directory(dir_path) {
        return false;
    }

    if is_direct_subfolder_of_features(dir_path) {
        return true;
    }

    has_feature_flag_in_readme(dir_path)
}

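/// Walks `dir` and collects all feature directories, without attaching git history.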
pub fn list_files_recursive(dir: &Path) -> Result<Vec<Feature>> {
    list_files_recursive_impl(dir, None)
}

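/// Walks `dir` and collects all feature directories, attaching the commits that touched each path.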
pub fn list_files_recursive_with_changes(dir: &Path) -> Result<Vec<Feature>> {
    let all_commits = get_all_commits_by_path(dir).unwrap_or_default();
    list_files_recursive_impl(dir, Some(&all_commits))
}

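/// Reads the Markdown decision records from the first existing `.docs/decisions` or
/// `__docs__/decisions` directory, skipping `README.md`.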
fn read_decision_files(feature_path: &Path) -> Result<Vec<String>> {
    let mut decisions = Vec::new();

    let decision_paths = [
        feature_path.join(".docs").join("decisions"),
        feature_path.join("__docs__").join("decisions"),
    ];

    for decisions_dir in &decision_paths {
        if decisions_dir.exists() && decisions_dir.is_dir() {
            let entries = fs::read_dir(decisions_dir).with_context(|| {
                format!(
                    "could not read decisions directory `{}`",
                    decisions_dir.display()
                )
            })?;

            for entry in entries {
                let entry = entry?;
                let path = entry.path();

                if path.is_file()
                    && let Some(file_name) = path.file_name()
                {
                    let file_name_str = file_name.to_string_lossy();
                    if file_name_str.ends_with(".md") && file_name_str != "README.md" {
                        let content = fs::read_to_string(&path).with_context(|| {
                            format!("could not read decision file `{}`", path.display())
                        })?;
                        decisions.push(content);
                    }
                }
            }

            // Only the first decisions directory that exists is read.
            break;
        }
    }

    Ok(decisions)
}

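/// Aggregates commit statistics (total count, per-author counts, per-type counts, and
/// first/last commit dates) from a feature's changes. Returns `None` when there are no changes.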
fn compute_stats_from_changes(changes: &[Change]) -> Option<Stats> {
    if changes.is_empty() {
        return None;
    }

    let mut commits = HashMap::new();

    commits.insert(
        "total_commits".to_string(),
        serde_json::json!(changes.len()),
    );

    let mut authors_count: HashMap<String, usize> = HashMap::new();
    for change in changes {
        *authors_count.entry(change.author_name.clone()).or_insert(0) += 1;
    }
    commits.insert(
        "authors_count".to_string(),
        serde_json::json!(authors_count),
    );

    let mut count_by_type: HashMap<String, usize> = HashMap::new();
    for change in changes {
        let commit_type = extract_commit_type(&change.title);
        *count_by_type.entry(commit_type).or_insert(0) += 1;
    }
    commits.insert(
        "count_by_type".to_string(),
        serde_json::json!(count_by_type),
    );

    if let Some(first) = changes.first() {
        commits.insert(
            "first_commit_date".to_string(),
            serde_json::json!(first.date.clone()),
        );
    }
    if let Some(last) = changes.last() {
        commits.insert(
            "last_commit_date".to_string(),
            serde_json::json!(last.date.clone()),
        );
    }

    Some(Stats { commits })
}

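/// Extracts the Conventional Commits type (e.g. `feat`, `fix`) from a commit title,
/// returning `"other"` for anything unrecognised.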
fn extract_commit_type(title: &str) -> String {
    let known_types = [
        "feat", "fix", "docs", "style", "refactor", "perf", "test", "build", "ci", "chore",
        "revert",
    ];

    if let Some(colon_pos) = title.find(':') {
        let prefix = &title[..colon_pos];

        // Drop an optional scope, e.g. `feat(auth)` -> `feat`.
        let type_part = if let Some(paren_pos) = prefix.find('(') {
            &prefix[..paren_pos]
        } else {
            prefix
        };

        let type_part = type_part.trim().to_lowercase();

        if known_types.contains(&type_part.as_str()) {
            return type_part;
        }
    }

    "other".to_string()
}

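/// Builds a `Feature` for a single directory: README metadata, git changes, decision records,
/// commit stats, and any nested features found below it.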
fn process_feature_directory(
    path: &Path,
    name: &str,
    changes_map: Option<&HashMap<String, Vec<Change>>>,
) -> Result<Feature> {
    let (owner, description, mut meta) = if let Some(readme_path) = find_readme_file(path) {
        read_readme_info(&readme_path)?
    } else {
        (
            "Unknown".to_string(),
            "".to_string(),
            std::collections::HashMap::new(),
        )
    };

    meta.remove("feature");

    let changes = if let Some(map) = changes_map {
        get_changes_for_path(path, map).unwrap_or_default()
    } else {
        Vec::new()
    };

    let decisions = read_decision_files(path).unwrap_or_default();

    // Features nested under an explicit `features/` directory.
    let nested_features_path = path.join("features");
    let mut nested_features = if nested_features_path.exists() && nested_features_path.is_dir() {
        list_files_recursive_impl(&nested_features_path, changes_map).unwrap_or_default()
    } else {
        Vec::new()
    };

    let entries = fs::read_dir(path)
        .with_context(|| format!("could not read directory `{}`", path.display()))?;

    let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
    entries.sort_by_key(|entry| entry.path());

    // Also pick up feature directories that live directly under this one (outside `features/`).
    for entry in entries {
        let entry_path = entry.path();
        let entry_name = entry_path.file_name().unwrap().to_string_lossy();

        if entry_path.is_dir()
            && entry_name != "features"
            && !is_documentation_directory(&entry_path)
        {
            if has_feature_flag_in_readme(&entry_path) {
                let nested_feature =
                    process_feature_directory(&entry_path, &entry_name, changes_map)?;
                nested_features.push(nested_feature);
            } else {
                let deeper_features = list_files_recursive_impl(&entry_path, changes_map)?;
                nested_features.extend(deeper_features);
            }
        }
    }

    let stats = compute_stats_from_changes(&changes);

    Ok(Feature {
        name: name.to_string(),
        description,
        owner,
        path: path.to_string_lossy().to_string(),
        features: nested_features,
        meta,
        changes,
        decisions,
        stats,
    })
}

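/// Recursive worker shared by the public entry points: descends into non-documentation
/// directories and turns every feature directory into a `Feature`.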
fn list_files_recursive_impl(
    dir: &Path,
    changes_map: Option<&HashMap<String, Vec<Change>>>,
) -> Result<Vec<Feature>> {
    let entries = fs::read_dir(dir)
        .with_context(|| format!("could not read directory `{}`", dir.display()))?;

    let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
    entries.sort_by_key(|entry| entry.path());

    let mut features: Vec<Feature> = Vec::new();

    for entry in entries {
        let path = entry.path();
        let name = path.file_name().unwrap().to_string_lossy();

        if path.is_dir() {
            if is_feature_directory(&path) {
                let feature = process_feature_directory(&path, &name, changes_map)?;
                features.push(feature);
            } else if !is_documentation_directory(&path)
                && !is_inside_documentation_directory(&path)
            {
                let new_features = list_files_recursive_impl(&path, changes_map)?;
                features.extend(new_features);
            }
        }
    }

    Ok(features)
}

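/// Looks up the changes recorded for `path` in `changes_map`, which is keyed by the path
/// relative to the repository's working directory.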
fn get_changes_for_path(
    path: &Path,
    changes_map: &HashMap<String, Vec<Change>>,
) -> Result<Vec<Change>> {
    let canonical_path = std::fs::canonicalize(path)?;

    let repo = Repository::discover(path)?;
    let repo_workdir = repo
        .workdir()
        .context("repository has no working directory")?;

    let relative_path = canonical_path
        .strip_prefix(repo_workdir)
        .context("path is not within repository")?;

    let relative_path_str = relative_path.to_string_lossy().to_string();

    Ok(changes_map
        .get(&relative_path_str)
        .cloned()
        .unwrap_or_default())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_extract_commit_type() {
        assert_eq!(extract_commit_type("feat: add new feature"), "feat");
        assert_eq!(extract_commit_type("fix: resolve bug"), "fix");
        assert_eq!(extract_commit_type("docs: update README"), "docs");
        assert_eq!(extract_commit_type("style: format code"), "style");
        assert_eq!(
            extract_commit_type("refactor: improve structure"),
            "refactor"
        );
        assert_eq!(extract_commit_type("perf: optimize performance"), "perf");
        assert_eq!(extract_commit_type("test: add unit tests"), "test");
        assert_eq!(extract_commit_type("build: update dependencies"), "build");
        assert_eq!(extract_commit_type("ci: fix CI pipeline"), "ci");
        assert_eq!(extract_commit_type("chore: update gitignore"), "chore");
        assert_eq!(
            extract_commit_type("revert: undo previous commit"),
            "revert"
        );

        assert_eq!(extract_commit_type("feat(auth): add login"), "feat");
        assert_eq!(
            extract_commit_type("fix(api): resolve endpoint issue"),
            "fix"
        );
        assert_eq!(
            extract_commit_type("docs(readme): update instructions"),
            "docs"
        );

        assert_eq!(extract_commit_type("FEAT: uppercase type"), "feat");
        assert_eq!(extract_commit_type("Fix: mixed case"), "fix");
        assert_eq!(extract_commit_type("DOCS: all caps"), "docs");

        assert_eq!(extract_commit_type("random commit message"), "other");
        assert_eq!(extract_commit_type("update: not conventional"), "other");
        assert_eq!(
            extract_commit_type("feature: close but not standard"),
            "other"
        );
        assert_eq!(extract_commit_type("no colon here"), "other");
        assert_eq!(extract_commit_type(""), "other");

        assert_eq!(extract_commit_type("feat:no space after colon"), "feat");
        assert_eq!(extract_commit_type("feat : extra spaces"), "feat");
        assert_eq!(
            extract_commit_type("feat(scope)(weird): nested parens"),
            "feat"
        );
    }
}