1use anyhow::{Context, Result};
2use git2::Repository;
3use std::collections::HashMap;
4use std::fs;
5use std::path::Path;
6
7use crate::git_helper::get_all_commits_by_path;
8use crate::models::{Change, Feature, Stats};
9use crate::readme_parser::read_readme_info;
10
/// Returns true when `dir_path`'s final component is one of the recognized
/// documentation folder names (`docs`, `__docs__`, `.docs`), compared
/// case-insensitively. A path with no final component yields `false`.
fn is_documentation_directory(dir_path: &Path) -> bool {
    let name = match dir_path.file_name().and_then(|name| name.to_str()) {
        Some(name) => name.to_lowercase(),
        None => return false,
    };

    matches!(name.as_str(), "docs" | "__docs__" | ".docs")
}
22
23fn is_inside_documentation_directory(dir_path: &Path) -> bool {
24 for ancestor in dir_path.ancestors().skip(1) {
26 if is_documentation_directory(ancestor) {
27 return true;
28 }
29 }
30 false
31}
32
/// Returns true when the immediate parent directory is literally named
/// `features` (exact, case-sensitive match).
fn is_direct_subfolder_of_features(dir_path: &Path) -> bool {
    dir_path
        .parent()
        .and_then(|parent| parent.file_name())
        .and_then(|name| name.to_str())
        .map_or(false, |name| name == "features")
}
41
/// Looks for a README directly inside `dir_path`, preferring `README.md`
/// over `README.mdx`, and returns the first candidate that exists on disk.
fn find_readme_file(dir_path: &Path) -> Option<std::path::PathBuf> {
    ["README.md", "README.mdx"]
        .into_iter()
        .map(|candidate| dir_path.join(candidate))
        .find(|candidate_path| candidate_path.exists())
}
54
55fn has_feature_flag_in_readme(dir_path: &Path) -> bool {
57 if let Some(readme_path) = find_readme_file(dir_path)
58 && let Ok(content) = fs::read_to_string(&readme_path)
59 {
60 if let Some(stripped) = content.strip_prefix("---\n")
62 && let Some(end_pos) = stripped.find("\n---\n")
63 {
64 let yaml_content = &stripped[..end_pos];
65
66 if let Ok(yaml_value) = serde_yaml::from_str::<serde_yaml::Value>(yaml_content)
68 && let Some(mapping) = yaml_value.as_mapping()
69 {
70 if let Some(feature_value) =
72 mapping.get(serde_yaml::Value::String("feature".to_string()))
73 {
74 return feature_value.as_bool() == Some(true);
75 }
76 }
77 }
78 }
79 false
80}
81
82fn is_feature_directory(dir_path: &Path) -> bool {
84 if is_documentation_directory(dir_path) || is_inside_documentation_directory(dir_path) {
86 return false;
87 }
88
89 if is_direct_subfolder_of_features(dir_path) {
91 return true;
92 }
93
94 has_feature_flag_in_readme(dir_path)
96}
97
/// Recursively scans `dir` and returns every feature directory found,
/// without attaching git change history (so no stats are computed either).
pub fn list_files_recursive(dir: &Path) -> Result<Vec<Feature>> {
    // `dir` doubles as the base path: feature paths come out relative to it.
    list_files_recursive_impl(dir, dir, None, None)
}
101
/// Like [`list_files_recursive`], but first loads the repository's commit
/// history so each feature carries its changes and derived stats.
pub fn list_files_recursive_with_changes(dir: &Path) -> Result<Vec<Feature>> {
    // Best effort: a missing or unreadable repository yields an empty
    // history instead of an error.
    let all_commits = get_all_commits_by_path(dir).unwrap_or_default();
    list_files_recursive_impl(dir, dir, Some(&all_commits), None)
}
107
108fn read_decision_files(feature_path: &Path) -> Result<Vec<String>> {
109 let mut decisions = Vec::new();
110
111 let decision_paths = [
113 feature_path.join(".docs").join("decisions"),
114 feature_path.join("__docs__").join("decisions"),
115 ];
116
117 for decisions_dir in &decision_paths {
118 if decisions_dir.exists() && decisions_dir.is_dir() {
119 let entries = fs::read_dir(decisions_dir).with_context(|| {
120 format!(
121 "could not read decisions directory `{}`",
122 decisions_dir.display()
123 )
124 })?;
125
126 for entry in entries {
127 let entry = entry?;
128 let path = entry.path();
129
130 if path.is_file()
132 && let Some(file_name) = path.file_name()
133 {
134 let file_name_str = file_name.to_string_lossy();
135 if file_name_str.ends_with(".md") && file_name_str != "README.md" {
136 let content = fs::read_to_string(&path).with_context(|| {
137 format!("could not read decision file `{}`", path.display())
138 })?;
139 decisions.push(content);
140 }
141 }
142 }
143 break; }
145 }
146
147 Ok(decisions)
148}
149
150fn count_files(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
152 let mut file_count = 0;
153
154 if let Ok(entries) = fs::read_dir(feature_path) {
155 for entry in entries.flatten() {
156 let path = entry.path();
157 let path_str = path.to_string_lossy().to_string();
158
159 if is_documentation_directory(&path) {
161 continue;
162 }
163
164 if nested_feature_paths
166 .iter()
167 .any(|nfp| path_str.starts_with(nfp))
168 {
169 continue;
170 }
171
172 if path.is_file() {
173 file_count += 1;
174 } else if path.is_dir() {
175 file_count += count_files(&path, nested_feature_paths);
177 }
178 }
179 }
180
181 file_count
182}
183
184fn count_lines(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
186 let mut line_count = 0;
187
188 if let Ok(entries) = fs::read_dir(feature_path) {
189 for entry in entries.flatten() {
190 let path = entry.path();
191 let path_str = path.to_string_lossy().to_string();
192
193 if is_documentation_directory(&path) {
195 continue;
196 }
197
198 if nested_feature_paths
200 .iter()
201 .any(|nfp| path_str.starts_with(nfp))
202 {
203 continue;
204 }
205
206 if path.is_file() {
207 if let Ok(content) = fs::read_to_string(&path) {
209 line_count += content.lines().count();
210 }
211 } else if path.is_dir() {
212 line_count += count_lines(&path, nested_feature_paths);
214 }
215 }
216 }
217
218 line_count
219}
220
221fn count_todos(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
223 let mut todo_count = 0;
224
225 if let Ok(entries) = fs::read_dir(feature_path) {
226 for entry in entries.flatten() {
227 let path = entry.path();
228 let path_str = path.to_string_lossy().to_string();
229
230 if is_documentation_directory(&path) {
232 continue;
233 }
234
235 if nested_feature_paths
237 .iter()
238 .any(|nfp| path_str.starts_with(nfp))
239 {
240 continue;
241 }
242
243 if path.is_file() {
244 if let Ok(content) = fs::read_to_string(&path) {
246 for line in content.lines() {
247 let line_upper = line.to_uppercase();
249 if line_upper.contains("TODO") {
250 todo_count += 1;
251 }
252 }
253 }
254 } else if path.is_dir() {
255 todo_count += count_todos(&path, nested_feature_paths);
257 }
258 }
259 }
260
261 todo_count
262}
263
/// Returns the repo-relative paths touched by the commit `commit_hash`.
///
/// For a root commit (no parents) every path in its tree is returned,
/// since the entire tree was introduced by that commit. Otherwise the
/// result is the union of new- and old-file paths from the diff against
/// the first parent only (merge commits are diffed against parent 0).
/// Any git failure yields an empty list rather than an error.
fn get_commit_affected_paths(repo: &Repository, commit_hash: &str) -> Vec<String> {
    let Ok(oid) = git2::Oid::from_str(commit_hash) else {
        return Vec::new();
    };

    let Ok(commit) = repo.find_commit(oid) else {
        return Vec::new();
    };

    let mut paths = Vec::new();

    // Root commit: no parent to diff against, so report the whole tree.
    if commit.parent_count() == 0 {
        if let Ok(tree) = commit.tree() {
            collect_all_tree_paths(repo, &tree, "", &mut paths);
        }
        return paths;
    }

    let Ok(tree) = commit.tree() else {
        return Vec::new();
    };

    let Ok(parent) = commit.parent(0) else {
        return Vec::new();
    };

    let Ok(parent_tree) = parent.tree() else {
        return Vec::new();
    };

    if let Ok(diff) = repo.diff_tree_to_tree(Some(&parent_tree), Some(&tree), None) {
        // File-level callback only (hunk/line callbacks are None); the
        // foreach result is ignored because partial output is acceptable.
        let _ = diff.foreach(
            &mut |delta, _| {
                // New-side path (destination for renames/modifications).
                if let Some(path) = delta.new_file().path()
                    && let Some(path_str) = path.to_str()
                {
                    paths.push(path_str.to_string());
                }
                // Old-side path, deduplicated against what was pushed so
                // far (new- and old-side are identical except for
                // renames/deletes).
                // NOTE(review): only old-file paths are deduplicated;
                // new-file paths are pushed unconditionally, so duplicates
                // across deltas are theoretically possible — confirm
                // whether callers care.
                if let Some(path) = delta.old_file().path()
                    && let Some(path_str) = path.to_str()
                    && !paths.contains(&path_str.to_string())
                {
                    paths.push(path_str.to_string());
                }
                true
            },
            None,
            None,
            None,
        );
    }

    paths
}
321
322fn collect_all_tree_paths(
324 repo: &Repository,
325 tree: &git2::Tree,
326 prefix: &str,
327 paths: &mut Vec<String>,
328) {
329 for entry in tree.iter() {
330 if let Some(name) = entry.name() {
331 let path = if prefix.is_empty() {
332 name.to_string()
333 } else {
334 format!("{}/{}", prefix, name)
335 };
336
337 paths.push(path.clone());
338
339 if entry.kind() == Some(git2::ObjectType::Tree)
340 && let Ok(obj) = entry.to_object(repo)
341 && let Ok(subtree) = obj.peel_to_tree()
342 {
343 collect_all_tree_paths(repo, &subtree, &path, paths);
344 }
345 }
346 }
347}
348
349fn compute_stats_from_changes(
351 changes: &[Change],
352 feature_path: &Path,
353 nested_features: &[Feature],
354) -> Option<Stats> {
355 if changes.is_empty() {
356 return None;
357 }
358
359 let nested_feature_paths: Vec<String> =
361 nested_features.iter().map(|f| f.path.clone()).collect();
362
363 let repo = Repository::discover(feature_path).ok();
365
366 let feature_relative_path = if let Some(ref r) = repo {
368 if let Ok(canonical_path) = std::fs::canonicalize(feature_path) {
369 if let Some(workdir) = r.workdir() {
370 canonical_path
371 .strip_prefix(workdir)
372 .ok()
373 .map(|p| p.to_string_lossy().to_string())
374 } else {
375 None
376 }
377 } else {
378 None
379 }
380 } else {
381 None
382 };
383
384 let filtered_changes: Vec<&Change> = changes
387 .iter()
388 .filter(|change| {
389 let Some(ref r) = repo else {
391 return true;
392 };
393
394 let Some(ref feature_rel_path) = feature_relative_path else {
395 return true;
396 };
397
398 let affected_files = get_commit_affected_paths(r, &change.hash);
400
401 affected_files.iter().any(|file_path| {
403 let in_feature = file_path.starts_with(feature_rel_path);
405
406 let in_nested = nested_feature_paths.iter().any(|nested_path| {
408 if let Ok(nested_canonical) = std::fs::canonicalize(nested_path)
410 && let Some(workdir) = r.workdir()
411 && let Ok(nested_rel) = nested_canonical.strip_prefix(workdir)
412 {
413 let nested_rel_str = nested_rel.to_string_lossy();
414 return file_path.starts_with(nested_rel_str.as_ref());
415 }
416 false
417 });
418
419 in_feature && !in_nested
420 })
421 })
422 .collect();
423
424 let mut commits = HashMap::new();
425
426 commits.insert(
428 "total_commits".to_string(),
429 serde_json::json!(filtered_changes.len()),
430 );
431
432 let mut authors_count: HashMap<String, usize> = HashMap::new();
434 for change in &filtered_changes {
435 *authors_count.entry(change.author_name.clone()).or_insert(0) += 1;
436 }
437 commits.insert(
438 "authors_count".to_string(),
439 serde_json::json!(authors_count),
440 );
441
442 let mut count_by_type: HashMap<String, usize> = HashMap::new();
444 for change in &filtered_changes {
445 let commit_type = extract_commit_type(&change.title);
446 *count_by_type.entry(commit_type).or_insert(0) += 1;
447 }
448 commits.insert(
449 "count_by_type".to_string(),
450 serde_json::json!(count_by_type),
451 );
452
453 if let Some(first) = filtered_changes.first() {
455 commits.insert(
456 "first_commit_date".to_string(),
457 serde_json::json!(first.date.clone()),
458 );
459 }
460 if let Some(last) = filtered_changes.last() {
461 commits.insert(
462 "last_commit_date".to_string(),
463 serde_json::json!(last.date.clone()),
464 );
465 }
466
467 let files_count = count_files(feature_path, &nested_feature_paths);
469 let lines_count = count_lines(feature_path, &nested_feature_paths);
470 let todos_count = count_todos(feature_path, &nested_feature_paths);
471
472 Some(Stats {
473 files_count: Some(files_count),
474 lines_count: Some(lines_count),
475 todos_count: Some(todos_count),
476 commits,
477 coverage: None,
478 })
479}
480
/// Maps a commit title to its conventional-commit type.
///
/// Recognizes `type: subject` and `type(scope): subject` for the standard
/// types, case-insensitively and tolerating whitespace around the type.
/// Anything else — unknown types, or titles without a colon — maps to
/// `"other"`.
fn extract_commit_type(title: &str) -> String {
    const KNOWN_TYPES: [&str; 11] = [
        "feat", "fix", "docs", "style", "refactor", "perf", "test", "build", "ci", "chore",
        "revert",
    ];

    let Some((prefix, _)) = title.split_once(':') else {
        return "other".to_string();
    };

    // "feat(auth)" -> "feat": keep only what precedes the first '('.
    let type_part = prefix
        .split('(')
        .next()
        .unwrap_or(prefix)
        .trim()
        .to_lowercase();

    if KNOWN_TYPES.contains(&type_part.as_str()) {
        type_part
    } else {
        "other".to_string()
    }
}
511
/// Builds a [`Feature`] for the directory at `path`.
///
/// Reads the README (falling back to an empty one), attaches git changes
/// and decision records, resolves the owner (inheriting `parent_owner`
/// when the README declares none), and recursively gathers nested
/// features both from a `features/` subfolder and from flagged
/// subdirectories. `name` is the fallback feature name when the README
/// has no title; `base_path` is what the stored feature path is made
/// relative to.
///
/// # Errors
/// Returns an error when a README cannot be parsed or a directory cannot
/// be read.
fn process_feature_directory(
    path: &Path,
    base_path: &Path,
    name: &str,
    changes_map: Option<&HashMap<String, Vec<Change>>>,
    parent_owner: Option<&str>,
) -> Result<Feature> {
    // A missing README is not an error: fall back to empty metadata.
    let mut readme_info = if let Some(readme_path) = find_readme_file(path) {
        read_readme_info(&readme_path)?
    } else {
        use crate::readme_parser::ReadmeInfo;
        ReadmeInfo {
            title: None,
            owner: "".to_string(),
            description: "".to_string(),
            meta: std::collections::HashMap::new(),
        }
    };

    // The `feature` flag is an internal marker, not user-facing metadata.
    readme_info.meta.remove("feature");

    // Change lookup is best effort: failures degrade to "no changes".
    let changes = if let Some(map) = changes_map {
        get_changes_for_path(path, map).unwrap_or_default()
    } else {
        Vec::new()
    };

    let decisions = read_decision_files(path).unwrap_or_default();

    // Owner resolution: README owner wins; otherwise inherit the parent's
    // owner (and mark it inherited); otherwise empty.
    let (actual_owner, is_owner_inherited) = if readme_info.owner.is_empty() {
        if let Some(parent) = parent_owner {
            (parent.to_string(), true)
        } else {
            ("".to_string(), false)
        }
    } else {
        (readme_info.owner.clone(), false)
    };

    // Nested features declared under a conventional `features/` folder.
    let nested_features_path = path.join("features");
    let mut nested_features = if nested_features_path.exists() && nested_features_path.is_dir() {
        list_files_recursive_impl(
            &nested_features_path,
            base_path,
            changes_map,
            Some(&actual_owner),
        )
        .unwrap_or_default()
    } else {
        Vec::new()
    };

    let entries = fs::read_dir(path)
        .with_context(|| format!("could not read directory `{}`", path.display()))?;

    // Sort entries so nested-feature ordering is deterministic.
    let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
    entries.sort_by_key(|entry| entry.path());

    // Also pick up nested features outside `features/`: any subdirectory
    // flagged via README front matter, or features buried deeper below
    // plain subdirectories.
    for entry in entries {
        let entry_path = entry.path();
        let entry_name = entry_path.file_name().unwrap().to_string_lossy();

        if entry_path.is_dir()
            && entry_name != "features" && !is_documentation_directory(&entry_path)
        {
            if has_feature_flag_in_readme(&entry_path) {
                let nested_feature = process_feature_directory(
                    &entry_path,
                    base_path,
                    &entry_name,
                    changes_map,
                    Some(&actual_owner),
                )?;
                nested_features.push(nested_feature);
            } else {
                // Not itself a feature: recurse to find features below it.
                let deeper_features = list_files_recursive_impl(
                    &entry_path,
                    base_path,
                    changes_map,
                    Some(&actual_owner),
                )?;
                nested_features.extend(deeper_features);
            }
        }
    }

    // Stats depend on nested features so their files/commits are excluded.
    let stats = compute_stats_from_changes(&changes, path, &nested_features);

    // Store the path relative to the scan root; fall back to the absolute
    // path when it is not under `base_path`.
    let relative_path = path
        .strip_prefix(base_path)
        .unwrap_or(path)
        .to_string_lossy()
        .to_string();

    Ok(Feature {
        name: readme_info.title.unwrap_or_else(|| name.to_string()),
        description: readme_info.description,
        owner: actual_owner,
        is_owner_inherited,
        path: relative_path,
        features: nested_features,
        meta: readme_info.meta,
        changes,
        decisions,
        stats,
    })
}
632
633fn list_files_recursive_impl(
634 dir: &Path,
635 base_path: &Path,
636 changes_map: Option<&HashMap<String, Vec<Change>>>,
637 parent_owner: Option<&str>,
638) -> Result<Vec<Feature>> {
639 let entries = fs::read_dir(dir)
640 .with_context(|| format!("could not read directory `{}`", dir.display()))?;
641
642 let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
643 entries.sort_by_key(|entry| entry.path());
644
645 let mut features: Vec<Feature> = Vec::new();
646
647 for entry in entries {
648 let path = entry.path();
649 let name = path.file_name().unwrap().to_string_lossy();
650
651 if path.is_dir() {
652 if is_feature_directory(&path) {
653 let feature =
654 process_feature_directory(&path, base_path, &name, changes_map, parent_owner)?;
655 features.push(feature);
656 } else if !is_documentation_directory(&path)
657 && !is_inside_documentation_directory(&path)
658 {
659 let new_features =
661 list_files_recursive_impl(&path, base_path, changes_map, parent_owner)?;
662 features.extend(new_features);
663 }
664 }
665 }
666
667 Ok(features)
668}
669
670fn get_changes_for_path(
672 path: &Path,
673 changes_map: &HashMap<String, Vec<Change>>,
674) -> Result<Vec<Change>> {
675 let canonical_path = std::fs::canonicalize(path)?;
677
678 let repo = Repository::discover(path)?;
680 let repo_workdir = repo
681 .workdir()
682 .context("repository has no working directory")?;
683
684 let relative_path = canonical_path
686 .strip_prefix(repo_workdir)
687 .context("path is not within repository")?;
688
689 let relative_path_str = relative_path.to_string_lossy().to_string();
690
691 Ok(changes_map
693 .get(&relative_path_str)
694 .cloned()
695 .unwrap_or_default())
696}
697
#[cfg(test)]
mod tests {
    use super::*;

    // Exercises `extract_commit_type` across the conventional-commit
    // forms it recognizes, plus the fallbacks to "other".
    #[test]
    fn test_extract_commit_type() {
        // Plain `type: subject` for every known type.
        assert_eq!(extract_commit_type("feat: add new feature"), "feat");
        assert_eq!(extract_commit_type("fix: resolve bug"), "fix");
        assert_eq!(extract_commit_type("docs: update README"), "docs");
        assert_eq!(extract_commit_type("style: format code"), "style");
        assert_eq!(
            extract_commit_type("refactor: improve structure"),
            "refactor"
        );
        assert_eq!(extract_commit_type("perf: optimize performance"), "perf");
        assert_eq!(extract_commit_type("test: add unit tests"), "test");
        assert_eq!(extract_commit_type("build: update dependencies"), "build");
        assert_eq!(extract_commit_type("ci: fix CI pipeline"), "ci");
        assert_eq!(extract_commit_type("chore: update gitignore"), "chore");
        assert_eq!(
            extract_commit_type("revert: undo previous commit"),
            "revert"
        );

        // Scoped form `type(scope): subject`.
        assert_eq!(extract_commit_type("feat(auth): add login"), "feat");
        assert_eq!(
            extract_commit_type("fix(api): resolve endpoint issue"),
            "fix"
        );
        assert_eq!(
            extract_commit_type("docs(readme): update instructions"),
            "docs"
        );

        // Matching is case-insensitive.
        assert_eq!(extract_commit_type("FEAT: uppercase type"), "feat");
        assert_eq!(extract_commit_type("Fix: mixed case"), "fix");
        assert_eq!(extract_commit_type("DOCS: all caps"), "docs");

        // Unknown types and non-conventional titles fall back to "other".
        assert_eq!(extract_commit_type("random commit message"), "other");
        assert_eq!(extract_commit_type("update: not conventional"), "other");
        assert_eq!(
            extract_commit_type("feature: close but not standard"),
            "other"
        );
        assert_eq!(extract_commit_type("no colon here"), "other");
        assert_eq!(extract_commit_type(""), "other");

        // Formatting quirks are tolerated.
        assert_eq!(extract_commit_type("feat:no space after colon"), "feat");
        assert_eq!(extract_commit_type("feat : extra spaces"), "feat");
        assert_eq!(
            extract_commit_type("feat(scope)(weird): nested parens"),
            "feat"
        );
    }
}