//! features_cli/file_scanner.rs
1use anyhow::{Context, Result};
2use git2::Repository;
3use std::collections::HashMap;
4use std::fs;
5use std::path::Path;
6
7use crate::dependency_resolver::{
8    build_file_to_feature_map, collect_feature_info, resolve_feature_dependencies,
9};
10use crate::feature_metadata_detector::{self, FeatureMetadataMap};
11use crate::git_helper::get_all_commits_by_path;
12use crate::import_detector::{ImportStatement, build_file_map, scan_file_for_imports};
13use crate::models::{Change, Feature, Stats};
14use crate::readme_parser::read_readme_info;
15
/// Return true when the final path component names a documentation directory.
fn is_documentation_directory(dir_path: &Path) -> bool {
    // Directory names conventionally used for documentation (compared
    // case-insensitively via lowercase normalization).
    const DOC_DIR_NAMES: [&str; 3] = ["docs", "__docs__", ".docs"];

    let name = match dir_path.file_name().and_then(|name| name.to_str()) {
        Some(name) => name.to_lowercase(),
        // No file name (e.g. a root path) can never be a docs directory.
        None => return false,
    };

    DOC_DIR_NAMES.iter().any(|candidate| *candidate == name)
}
27
28fn is_inside_documentation_directory(dir_path: &Path) -> bool {
29    // Check if any parent directory is a documentation directory
30    for ancestor in dir_path.ancestors().skip(1) {
31        if is_documentation_directory(ancestor) {
32            return true;
33        }
34    }
35    false
36}
37
/// Return true when `dir_path` sits directly under a directory literally
/// named "features" (the classic feature layout).
fn is_direct_subfolder_of_features(dir_path: &Path) -> bool {
    matches!(
        dir_path
            .parent()
            .and_then(|parent| parent.file_name())
            .and_then(|name| name.to_str()),
        Some("features")
    )
}
46
/// Locate a README file (`README.md` or `README.mdx`) directly inside `dir_path`.
///
/// Candidates are checked in priority order: plain markdown first, then MDX.
/// Returns the path of the first candidate that exists as a regular file, or
/// `None` when the directory has no README.
fn find_readme_file(dir_path: &Path) -> Option<std::path::PathBuf> {
    let readme_candidates = ["README.md", "README.mdx"];

    readme_candidates
        .iter()
        .map(|candidate| dir_path.join(candidate))
        // `is_file()` (rather than `exists()`) skips directories that happen to
        // be named README.md, which would break later `read_to_string` calls.
        .find(|path| path.is_file())
}
59
60/// Check if a directory has a README with `feature: true` in front matter
61fn has_feature_flag_in_readme(dir_path: &Path) -> bool {
62    if let Some(readme_path) = find_readme_file(dir_path)
63        && let Ok(content) = fs::read_to_string(&readme_path)
64    {
65        // Check if content starts with YAML front matter (---)
66        if let Some(stripped) = content.strip_prefix("---\n")
67            && let Some(end_pos) = stripped.find("\n---\n")
68        {
69            let yaml_content = &stripped[..end_pos];
70
71            // Parse YAML front matter
72            if let Ok(yaml_value) = serde_yaml::from_str::<serde_yaml::Value>(yaml_content)
73                && let Some(mapping) = yaml_value.as_mapping()
74            {
75                // Check for feature: true
76                if let Some(feature_value) =
77                    mapping.get(serde_yaml::Value::String("feature".to_string()))
78                {
79                    return feature_value.as_bool() == Some(true);
80                }
81            }
82        }
83    }
84    false
85}
86
87/// Check if a directory should be treated as a feature
88fn is_feature_directory(dir_path: &Path) -> bool {
89    // Skip documentation directories
90    if is_documentation_directory(dir_path) || is_inside_documentation_directory(dir_path) {
91        return false;
92    }
93
94    // Check if it's a direct subfolder of "features" (existing behavior)
95    if is_direct_subfolder_of_features(dir_path) {
96        return true;
97    }
98
99    // Check if the directory has a README with feature: true
100    has_feature_flag_in_readme(dir_path)
101}
102
/// Build the full feature tree rooted at `dir`, without git change history.
///
/// Runs in two passes: the first walks the directory tree and builds
/// `Feature` nodes; the second scans source files for imports and fills in
/// each feature's `dependencies`.
pub fn list_files_recursive(dir: &Path) -> Result<Vec<Feature>> {
    // Scan entire base_path for feature metadata once
    // (errors are swallowed: metadata is best-effort and defaults to empty).
    let feature_metadata =
        feature_metadata_detector::scan_directory_for_feature_metadata(dir).unwrap_or_default();

    // First pass: build feature structure without dependencies
    let mut features = list_files_recursive_impl(dir, dir, None, None, &feature_metadata)?;

    // Second pass: scan for imports and resolve dependencies
    populate_dependencies(&mut features, dir)?;

    Ok(features)
}
116
/// Build the full feature tree rooted at `dir`, attaching git change history.
///
/// Identical to [`list_files_recursive`] except that all commits are fetched
/// up front (grouped by path) and threaded into the first pass so each
/// feature's `changes` list is populated.
pub fn list_files_recursive_with_changes(dir: &Path) -> Result<Vec<Feature>> {
    // Get all commits once at the beginning for efficiency
    // (a git failure degrades to "no history" rather than an error).
    let all_commits = get_all_commits_by_path(dir).unwrap_or_default();
    // Scan entire base_path for feature metadata once
    let feature_metadata =
        feature_metadata_detector::scan_directory_for_feature_metadata(dir).unwrap_or_default();

    // First pass: build feature structure without dependencies
    let mut features =
        list_files_recursive_impl(dir, dir, Some(&all_commits), None, &feature_metadata)?;

    // Second pass: scan for imports and resolve dependencies
    populate_dependencies(&mut features, dir)?;

    Ok(features)
}
133
/// Populate dependencies for all features by scanning imports
///
/// Pipeline: build a lookup of source files under `base_path`, flatten the
/// feature tree, map every file to its owning feature, scan each feature's
/// files for import statements, then resolve those imports into feature-level
/// dependency edges written back into the tree.
fn populate_dependencies(features: &mut [Feature], base_path: &Path) -> Result<()> {
    // Build file map for quick path resolution
    let file_map = build_file_map(base_path);

    // Collect all feature info (flat list with paths)
    let mut feature_info_list = Vec::new();
    collect_feature_info(features, None, &mut feature_info_list);

    // Build file-to-feature mapping
    let file_to_feature_map = build_file_to_feature_map(&feature_info_list, base_path);

    // Build feature path to name mapping (path is the unique identifier)
    let mut feature_path_to_name_map = HashMap::new();
    for info in &feature_info_list {
        feature_path_to_name_map.insert(info.path.to_string_lossy().to_string(), info.name.clone());
    }

    // Scan all files in each feature for imports.
    // NOTE(review): this map is keyed by feature *name*, not path; two features
    // sharing a name would overwrite each other's imports — confirm names are
    // unique across the tree.
    let mut feature_imports: HashMap<String, Vec<ImportStatement>> = HashMap::new();

    for feature_info in &feature_info_list {
        let feature_path = base_path.join(&feature_info.path);
        let imports = scan_feature_directory_for_imports(&feature_path);
        feature_imports.insert(feature_info.name.clone(), imports);
    }

    // Now populate dependencies in the feature tree
    populate_dependencies_recursive(
        features,
        base_path,
        &feature_imports,
        &file_to_feature_map,
        &feature_path_to_name_map,
        &file_map,
    );

    Ok(())
}
173
174/// Scan a feature directory for all import statements
175fn scan_feature_directory_for_imports(feature_path: &Path) -> Vec<ImportStatement> {
176    let mut all_imports = Vec::new();
177
178    if let Ok(entries) = fs::read_dir(feature_path) {
179        for entry in entries.flatten() {
180            let path = entry.path();
181
182            // Skip documentation directories
183            if is_documentation_directory(&path) {
184                continue;
185            }
186
187            if path.is_file() {
188                if let Ok(imports) = scan_file_for_imports(&path) {
189                    all_imports.extend(imports);
190                }
191            } else if path.is_dir() {
192                // Skip 'features' subdirectory (contains nested features)
193                let dir_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
194                if dir_name == "features" {
195                    continue;
196                }
197
198                // Recursively scan subdirectories (but not nested features with readme flag)
199                if !has_feature_flag_in_readme(&path) {
200                    let nested_imports = scan_feature_directory_for_imports(&path);
201                    all_imports.extend(nested_imports);
202                }
203            }
204        }
205    }
206
207    all_imports
208}
209
210/// Recursively populate dependencies in the feature tree
211fn populate_dependencies_recursive(
212    features: &mut [Feature],
213    base_path: &Path,
214    feature_imports: &HashMap<String, Vec<ImportStatement>>,
215    file_to_feature_map: &HashMap<std::path::PathBuf, String>,
216    feature_path_to_name_map: &HashMap<String, String>,
217    file_map: &HashMap<String, std::path::PathBuf>,
218) {
219    for feature in features {
220        // Get imports for this feature
221        if let Some(imports) = feature_imports.get(&feature.name) {
222            let feature_path = std::path::PathBuf::from(&feature.path);
223
224            // Resolve dependencies
225            let dependencies = resolve_feature_dependencies(
226                &feature.name,
227                &feature_path,
228                base_path,
229                imports,
230                file_to_feature_map,
231                feature_path_to_name_map,
232                file_map,
233            );
234
235            feature.dependencies = dependencies;
236        }
237
238        // Recursively process nested features
239        if !feature.features.is_empty() {
240            populate_dependencies_recursive(
241                &mut feature.features,
242                base_path,
243                feature_imports,
244                file_to_feature_map,
245                feature_path_to_name_map,
246                file_map,
247            );
248        }
249    }
250}
251
/// Read every decision document recorded for a feature.
///
/// Decisions are markdown files stored under `<feature>/.docs/decisions` or
/// `<feature>/__docs__/decisions`; only the first layout found is used.
/// `README.md` files are skipped. Returns the raw markdown contents, or an
/// error if a directory/file that exists cannot be read.
fn read_decision_files(feature_path: &Path) -> Result<Vec<String>> {
    let mut decisions = Vec::new();

    // Check both supported documentation layouts (`.docs` and `__docs__`);
    // in either case the decision files live in a `decisions` subfolder.
    let decision_paths = [
        feature_path.join(".docs").join("decisions"),
        feature_path.join("__docs__").join("decisions"),
    ];

    for decisions_dir in &decision_paths {
        if decisions_dir.exists() && decisions_dir.is_dir() {
            let entries = fs::read_dir(decisions_dir).with_context(|| {
                format!(
                    "could not read decisions directory `{}`",
                    decisions_dir.display()
                )
            })?;

            for entry in entries {
                let entry = entry?;
                let path = entry.path();

                // Skip README.md files and only process .md files
                if path.is_file()
                    && let Some(file_name) = path.file_name()
                {
                    let file_name_str = file_name.to_string_lossy();
                    if file_name_str.ends_with(".md") && file_name_str != "README.md" {
                        let content = fs::read_to_string(&path).with_context(|| {
                            format!("could not read decision file `{}`", path.display())
                        })?;
                        decisions.push(content);
                    }
                }
            }
            break; // If we found one of the directories, don't check the other
        }
    }

    Ok(decisions)
}
293
294/// Count the number of files in a feature directory (excluding documentation)
295fn count_files(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
296    let mut file_count = 0;
297
298    if let Ok(entries) = fs::read_dir(feature_path) {
299        for entry in entries.flatten() {
300            let path = entry.path();
301            let path_str = path.to_string_lossy().to_string();
302
303            // Skip documentation directories
304            if is_documentation_directory(&path) {
305                continue;
306            }
307
308            // Skip nested feature directories
309            if nested_feature_paths
310                .iter()
311                .any(|nfp| path_str.starts_with(nfp))
312            {
313                continue;
314            }
315
316            if path.is_file() {
317                file_count += 1;
318            } else if path.is_dir() {
319                // Recursively count files in subdirectories
320                file_count += count_files(&path, nested_feature_paths);
321            }
322        }
323    }
324
325    file_count
326}
327
328/// Count the total number of lines in all files in a feature directory (excluding documentation)
329fn count_lines(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
330    let mut line_count = 0;
331
332    if let Ok(entries) = fs::read_dir(feature_path) {
333        for entry in entries.flatten() {
334            let path = entry.path();
335            let path_str = path.to_string_lossy().to_string();
336
337            // Skip documentation directories
338            if is_documentation_directory(&path) {
339                continue;
340            }
341
342            // Skip nested feature directories
343            if nested_feature_paths
344                .iter()
345                .any(|nfp| path_str.starts_with(nfp))
346            {
347                continue;
348            }
349
350            if path.is_file() {
351                // Try to read the file and count lines
352                if let Ok(content) = fs::read_to_string(&path) {
353                    line_count += content.lines().count();
354                }
355            } else if path.is_dir() {
356                // Recursively count lines in subdirectories
357                line_count += count_lines(&path, nested_feature_paths);
358            }
359        }
360    }
361
362    line_count
363}
364
365/// Count the total number of TODO comments in all files in a feature directory (excluding documentation)
366fn count_todos(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
367    let mut todo_count = 0;
368
369    if let Ok(entries) = fs::read_dir(feature_path) {
370        for entry in entries.flatten() {
371            let path = entry.path();
372            let path_str = path.to_string_lossy().to_string();
373
374            // Skip documentation directories
375            if is_documentation_directory(&path) {
376                continue;
377            }
378
379            // Skip nested feature directories
380            if nested_feature_paths
381                .iter()
382                .any(|nfp| path_str.starts_with(nfp))
383            {
384                continue;
385            }
386
387            if path.is_file() {
388                // Try to read the file and count TODO comments
389                if let Ok(content) = fs::read_to_string(&path) {
390                    for line in content.lines() {
391                        // Look for TODO in comments (case-insensitive)
392                        let line_upper = line.to_uppercase();
393                        if line_upper.contains("TODO") {
394                            todo_count += 1;
395                        }
396                    }
397                }
398            } else if path.is_dir() {
399                // Recursively count TODOs in subdirectories
400                todo_count += count_todos(&path, nested_feature_paths);
401            }
402        }
403    }
404
405    todo_count
406}
407
/// Get the paths affected by a specific commit
///
/// For a root commit (no parents) this is every path in the commit's tree;
/// otherwise it is the union of old and new file paths in the diff against
/// the *first* parent only (merge commits are therefore compared against
/// parent 0, not all parents). Any git error yields an empty list rather
/// than failing.
fn get_commit_affected_paths(repo: &Repository, commit_hash: &str) -> Vec<String> {
    let Ok(oid) = git2::Oid::from_str(commit_hash) else {
        return Vec::new();
    };

    let Ok(commit) = repo.find_commit(oid) else {
        return Vec::new();
    };

    let mut paths = Vec::new();

    // For the first commit (no parents), get all files in the tree
    if commit.parent_count() == 0 {
        if let Ok(tree) = commit.tree() {
            collect_all_tree_paths(repo, &tree, "", &mut paths);
        }
        return paths;
    }

    // For commits with parents, check the diff
    let Ok(tree) = commit.tree() else {
        return Vec::new();
    };

    let Ok(parent) = commit.parent(0) else {
        return Vec::new();
    };

    let Ok(parent_tree) = parent.tree() else {
        return Vec::new();
    };

    if let Ok(diff) = repo.diff_tree_to_tree(Some(&parent_tree), Some(&tree), None) {
        // Errors from foreach are deliberately ignored; whatever paths were
        // collected before a failure are still returned.
        let _ = diff.foreach(
            &mut |delta, _| {
                // New-side path (additions/modifications/renames-to).
                if let Some(path) = delta.new_file().path()
                    && let Some(path_str) = path.to_str()
                {
                    paths.push(path_str.to_string());
                }
                // Old-side path (deletions/renames-from), deduplicated against
                // what was already pushed.
                if let Some(path) = delta.old_file().path()
                    && let Some(path_str) = path.to_str()
                    && !paths.contains(&path_str.to_string())
                {
                    paths.push(path_str.to_string());
                }
                true
            },
            None,
            None,
            None,
        );
    }

    paths
}
465
466/// Collect all file paths in a tree (helper for get_commit_affected_paths)
467fn collect_all_tree_paths(
468    repo: &Repository,
469    tree: &git2::Tree,
470    prefix: &str,
471    paths: &mut Vec<String>,
472) {
473    for entry in tree.iter() {
474        if let Some(name) = entry.name() {
475            let path = if prefix.is_empty() {
476                name.to_string()
477            } else {
478                format!("{}/{}", prefix, name)
479            };
480
481            paths.push(path.clone());
482
483            if entry.kind() == Some(git2::ObjectType::Tree)
484                && let Ok(obj) = entry.to_object(repo)
485                && let Ok(subtree) = obj.peel_to_tree()
486            {
487                collect_all_tree_paths(repo, &subtree, &path, paths);
488            }
489        }
490    }
491}
492
/// Compute statistics from changes for a feature
///
/// Filters `changes` down to commits that touch files in this feature (but
/// not exclusively in nested features), then aggregates commit counts by
/// author and conventional-commit type, first/last commit dates, and
/// file/line/TODO counts. Returns `None` when `changes` is empty.
fn compute_stats_from_changes(
    changes: &[Change],
    feature_path: &Path,
    nested_features: &[Feature],
) -> Option<Stats> {
    if changes.is_empty() {
        return None;
    }

    // Collect paths of nested features to exclude from commit counts
    let nested_feature_paths: Vec<String> =
        nested_features.iter().map(|f| f.path.clone()).collect();

    // Get repository to check commit details (best-effort: filtering below
    // degrades to "include everything" without repo access).
    let repo = Repository::discover(feature_path).ok();

    // Get the feature's relative path from repo root
    let feature_relative_path = if let Some(ref r) = repo {
        if let Ok(canonical_path) = std::fs::canonicalize(feature_path) {
            if let Some(workdir) = r.workdir() {
                canonical_path
                    .strip_prefix(workdir)
                    .ok()
                    .map(|p| p.to_string_lossy().to_string())
            } else {
                // Bare repository: no working directory to strip against.
                None
            }
        } else {
            None
        }
    } else {
        None
    };

    // Filter changes to only include those that affect files in this feature
    // (not exclusively in nested features)
    let filtered_changes: Vec<&Change> = changes
        .iter()
        .filter(|change| {
            // If we don't have repo access, include all changes
            let Some(ref r) = repo else {
                return true;
            };

            let Some(ref feature_rel_path) = feature_relative_path else {
                return true;
            };

            // Get the files affected by this commit
            let affected_files = get_commit_affected_paths(r, &change.hash);

            // Check if any affected file is in this feature but not in a nested feature
            affected_files.iter().any(|file_path| {
                // File must be in this feature
                let in_feature = file_path.starts_with(feature_rel_path);

                // File must not be exclusively in a nested feature
                let in_nested = nested_feature_paths.iter().any(|nested_path| {
                    // Convert nested absolute path to relative path.
                    // NOTE(review): `nested_path` comes from Feature.path,
                    // which is base-relative — canonicalize resolves it
                    // against the process CWD and may fail or resolve
                    // wrongly, making `in_nested` false. Confirm intent.
                    if let Ok(nested_canonical) = std::fs::canonicalize(nested_path)
                        && let Some(workdir) = r.workdir()
                        && let Ok(nested_rel) = nested_canonical.strip_prefix(workdir)
                    {
                        let nested_rel_str = nested_rel.to_string_lossy();
                        return file_path.starts_with(nested_rel_str.as_ref());
                    }
                    false
                });

                in_feature && !in_nested
            })
        })
        .collect();

    let mut commits = HashMap::new();

    // Add total commit count
    commits.insert(
        "total_commits".to_string(),
        serde_json::json!(filtered_changes.len()),
    );

    // Count commits by author
    let mut authors_count: HashMap<String, usize> = HashMap::new();
    for change in &filtered_changes {
        *authors_count.entry(change.author_name.clone()).or_insert(0) += 1;
    }
    commits.insert(
        "authors_count".to_string(),
        serde_json::json!(authors_count),
    );

    // Count commits by conventional commit type
    let mut count_by_type: HashMap<String, usize> = HashMap::new();
    for change in &filtered_changes {
        let commit_type = extract_commit_type(&change.title);
        *count_by_type.entry(commit_type).or_insert(0) += 1;
    }
    commits.insert(
        "count_by_type".to_string(),
        serde_json::json!(count_by_type),
    );

    // Get first and last commit dates.
    // NOTE(review): "first"/"last" assume `changes` is already ordered by
    // date — confirm the ordering guarantee of the upstream commit source.
    if let Some(first) = filtered_changes.first() {
        commits.insert(
            "first_commit_date".to_string(),
            serde_json::json!(first.date.clone()),
        );
    }
    if let Some(last) = filtered_changes.last() {
        commits.insert(
            "last_commit_date".to_string(),
            serde_json::json!(last.date.clone()),
        );
    }

    // Count files and lines in the feature directory (excluding nested features)
    let files_count = count_files(feature_path, &nested_feature_paths);
    let lines_count = count_lines(feature_path, &nested_feature_paths);
    let todos_count = count_todos(feature_path, &nested_feature_paths);

    Some(Stats {
        files_count: Some(files_count),
        lines_count: Some(lines_count),
        todos_count: Some(todos_count),
        commits,
        coverage: None,
    })
}
624
/// Extract the commit type from a conventional commit title.
///
/// Recognizes `type: description`, `type(scope): description`, and the
/// breaking-change forms `type!: description` / `type(scope)!: description`.
/// Matching is case-insensitive; titles that do not carry a known
/// conventional commit type map to `"other"`.
fn extract_commit_type(title: &str) -> String {
    // Common conventional commit types
    let known_types = [
        "feat", "fix", "docs", "style", "refactor", "perf", "test", "build", "ci", "chore",
        "revert",
    ];

    // Check if the title follows conventional commit format (type: description or type(scope): description)
    if let Some(colon_pos) = title.find(':') {
        let prefix = &title[..colon_pos];

        // Remove scope if present (e.g., "feat(auth)" -> "feat")
        let type_part = prefix
            .find('(')
            .map_or(prefix, |paren_pos| &prefix[..paren_pos]);

        // Normalize: trim whitespace, drop the breaking-change marker
        // ("feat!" -> "feat"), and lowercase for comparison.
        let type_part = type_part.trim().trim_end_matches('!').to_lowercase();

        // Check if it's a known conventional commit type
        if known_types.contains(&type_part.as_str()) {
            return type_part;
        }
    }

    // If not a conventional commit, return "other"
    "other".to_string()
}
655
/// Build a single `Feature` from a feature directory.
///
/// Reads README front matter (title/owner/description/meta), merges in
/// globally-scanned feature metadata (matched by folder name), attaches git
/// changes and decision documents, resolves owner inheritance, recursively
/// collects nested features (both the `features/` layout and README-flagged
/// subdirectories), and computes file/line/TODO/commit statistics.
///
/// * `path` — the feature directory itself.
/// * `base_path` — scan root; the returned `Feature.path` is relative to it.
/// * `name` — fallback feature name when the README has no title.
/// * `changes_map` — optional repo-relative-path → commits map.
/// * `parent_owner` — owner inherited when the README declares none.
fn process_feature_directory(
    path: &Path,
    base_path: &Path,
    name: &str,
    changes_map: Option<&HashMap<String, Vec<Change>>>,
    parent_owner: Option<&str>,
    feature_metadata_map: &FeatureMetadataMap,
) -> Result<Feature> {
    // Try to find and read README file, use defaults if not found
    let mut readme_info = if let Some(readme_path) = find_readme_file(path) {
        read_readme_info(&readme_path)?
    } else {
        use crate::readme_parser::ReadmeInfo;
        ReadmeInfo {
            title: None,
            owner: "".to_string(),
            description: "".to_string(),
            meta: std::collections::HashMap::new(),
        }
    };

    // Remove the 'feature' key from meta if it exists (it's redundant since we know it's a feature)
    readme_info.meta.remove("feature");

    // Get the folder name (the last component of the path)
    let folder_name = path.file_name().and_then(|n| n.to_str()).unwrap_or(name);

    // Check if this feature has any metadata from the global scan (matched by folder name)
    if let Some(metadata_map) = feature_metadata_map.get(folder_name) {
        // Iterate through each metadata key (e.g., "feature-flag", "feature-experiment")
        for (metadata_key, flags) in metadata_map {
            // Convert Vec<HashMap<String, String>> to JSON array
            let flags_json: Vec<serde_json::Value> = flags
                .iter()
                .map(|flag_map| {
                    let json_map: serde_json::Map<String, serde_json::Value> = flag_map
                        .iter()
                        .map(|(k, v)| (k.clone(), serde_json::Value::String(v.clone())))
                        .collect();
                    serde_json::Value::Object(json_map)
                })
                .collect();

            // Check if this metadata key already exists, append if it does
            // (non-array existing values are left untouched by and_modify).
            readme_info
                .meta
                .entry(metadata_key.clone())
                .and_modify(|existing| {
                    if let serde_json::Value::Array(arr) = existing {
                        arr.extend(flags_json.clone());
                    }
                })
                .or_insert_with(|| serde_json::Value::Array(flags_json));
        }
    }

    let changes = if let Some(map) = changes_map {
        // Convert the absolute path to a repo-relative path and look up changes
        get_changes_for_path(path, map).unwrap_or_default()
    } else {
        Vec::new()
    };

    // Always include decisions regardless of include_changes flag
    let decisions = read_decision_files(path).unwrap_or_default();

    // Determine the actual owner and whether it's inherited
    let (actual_owner, is_owner_inherited) = if readme_info.owner.is_empty() {
        if let Some(parent) = parent_owner {
            (parent.to_string(), true)
        } else {
            ("".to_string(), false)
        }
    } else {
        (readme_info.owner.clone(), false)
    };

    // Check if this feature has nested features in a 'features' subdirectory
    let nested_features_path = path.join("features");
    let mut nested_features = if nested_features_path.exists() && nested_features_path.is_dir() {
        list_files_recursive_impl(
            &nested_features_path,
            base_path,
            changes_map,
            Some(&actual_owner),
            feature_metadata_map,
        )
        .unwrap_or_default()
    } else {
        Vec::new()
    };

    // Also check for nested features marked with feature: true in subdirectories
    let entries = fs::read_dir(path)
        .with_context(|| format!("could not read directory `{}`", path.display()))?;

    // Sort for deterministic output order.
    let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
    entries.sort_by_key(|entry| entry.path());

    for entry in entries {
        let entry_path = entry.path();
        // read_dir entries always carry a final component, so unwrap is safe.
        let entry_name = entry_path.file_name().unwrap().to_string_lossy();

        if entry_path.is_dir()
            && entry_name != "features" // Don't process 'features' folder twice
            && !is_documentation_directory(&entry_path)
        {
            if has_feature_flag_in_readme(&entry_path) {
                // This directory is a feature itself
                let nested_feature = process_feature_directory(
                    &entry_path,
                    base_path,
                    &entry_name,
                    changes_map,
                    Some(&actual_owner),
                    feature_metadata_map,
                )?;
                nested_features.push(nested_feature);
            } else {
                // This directory is not a feature, but might contain features
                // Recursively search for features inside it
                let deeper_features = list_files_recursive_impl(
                    &entry_path,
                    base_path,
                    changes_map,
                    Some(&actual_owner),
                    feature_metadata_map,
                )?;
                nested_features.extend(deeper_features);
            }
        }
    }

    // Collect paths of nested features to exclude from file/line counts
    let nested_feature_paths: Vec<String> =
        nested_features.iter().map(|f| f.path.clone()).collect();

    // Always compute file, line, and TODO counts.
    // NOTE(review): when compute_stats_from_changes returns Some below, these
    // three counts are recomputed inside it and the values here go unused.
    let files_count = count_files(path, &nested_feature_paths);
    let lines_count = count_lines(path, &nested_feature_paths);
    let todos_count = count_todos(path, &nested_feature_paths);

    // Compute stats from changes if available, otherwise create basic stats
    let stats =
        if let Some(change_stats) = compute_stats_from_changes(&changes, path, &nested_features) {
            // If we have change stats, they already include files/lines/todos counts
            Some(change_stats)
        } else {
            // No changes, but we still want to include file/line/todo counts
            Some(Stats {
                files_count: Some(files_count),
                lines_count: Some(lines_count),
                todos_count: Some(todos_count),
                commits: HashMap::new(),
                coverage: None,
            })
        };

    // Make path relative to base_path
    let relative_path = path
        .strip_prefix(base_path)
        .unwrap_or(path)
        .to_string_lossy()
        .to_string();

    Ok(Feature {
        name: readme_info.title.unwrap_or_else(|| name.to_string()),
        description: readme_info.description,
        owner: actual_owner,
        is_owner_inherited,
        path: relative_path,
        features: nested_features,
        meta: readme_info.meta,
        changes,
        decisions,
        stats,
        dependencies: Vec::new(), // Will be populated in second pass
    })
}
835
/// Walk `dir` looking for feature directories and build a `Feature` for each.
///
/// * `base_path` — original scan root; feature paths are made relative to it.
/// * `changes_map` — optional repo-relative-path → commit list for history.
/// * `parent_owner` — owner inherited by features that declare none.
/// * `feature_metadata_map` — folder-name-keyed metadata from the global scan.
///
/// Non-feature, non-documentation directories are recursed into, so features
/// may sit at any depth. Entries are sorted for deterministic output.
fn list_files_recursive_impl(
    dir: &Path,
    base_path: &Path,
    changes_map: Option<&HashMap<String, Vec<Change>>>,
    parent_owner: Option<&str>,
    feature_metadata_map: &FeatureMetadataMap,
) -> Result<Vec<Feature>> {
    let entries = fs::read_dir(dir)
        .with_context(|| format!("could not read directory `{}`", dir.display()))?;

    let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
    entries.sort_by_key(|entry| entry.path());

    let mut features: Vec<Feature> = Vec::new();

    for entry in entries {
        let path = entry.path();
        // read_dir entries always carry a final component, so unwrap is safe.
        let name = path.file_name().unwrap().to_string_lossy();

        if path.is_dir() {
            if is_feature_directory(&path) {
                let feature = process_feature_directory(
                    &path,
                    base_path,
                    &name,
                    changes_map,
                    parent_owner,
                    feature_metadata_map,
                )?;
                features.push(feature);
            } else if !is_documentation_directory(&path)
                && !is_inside_documentation_directory(&path)
            {
                // Recursively search for features in non-documentation subdirectories
                let new_features = list_files_recursive_impl(
                    &path,
                    base_path,
                    changes_map,
                    parent_owner,
                    feature_metadata_map,
                )?;
                features.extend(new_features);
            }
        }
    }

    Ok(features)
}
884
885/// Get changes for a specific path from the pre-computed changes map
886fn get_changes_for_path(
887    path: &Path,
888    changes_map: &HashMap<String, Vec<Change>>,
889) -> Result<Vec<Change>> {
890    // Canonicalize the path
891    let canonical_path = std::fs::canonicalize(path)?;
892
893    // Find the repository and get the working directory
894    let repo = Repository::discover(path)?;
895    let repo_workdir = repo
896        .workdir()
897        .context("repository has no working directory")?;
898
899    // Convert to relative path from repo root
900    let relative_path = canonical_path
901        .strip_prefix(repo_workdir)
902        .context("path is not within repository")?;
903
904    let relative_path_str = relative_path.to_string_lossy().to_string();
905
906    // Look up the changes in the map
907    Ok(changes_map
908        .get(&relative_path_str)
909        .cloned()
910        .unwrap_or_default())
911}
912
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_extract_commit_type() {
        // Table of (commit message, expected type) pairs covering the
        // standard conventional-commit types, scoped variants, casing,
        // non-conventional messages, and whitespace/paren edge cases.
        let cases = [
            // Standard conventional commit types
            ("feat: add new feature", "feat"),
            ("fix: resolve bug", "fix"),
            ("docs: update README", "docs"),
            ("style: format code", "style"),
            ("refactor: improve structure", "refactor"),
            ("perf: optimize performance", "perf"),
            ("test: add unit tests", "test"),
            ("build: update dependencies", "build"),
            ("ci: fix CI pipeline", "ci"),
            ("chore: update gitignore", "chore"),
            ("revert: undo previous commit", "revert"),
            // With scope
            ("feat(auth): add login", "feat"),
            ("fix(api): resolve endpoint issue", "fix"),
            ("docs(readme): update instructions", "docs"),
            // Case insensitivity
            ("FEAT: uppercase type", "feat"),
            ("Fix: mixed case", "fix"),
            ("DOCS: all caps", "docs"),
            // Non-conventional commits
            ("random commit message", "other"),
            ("update: not conventional", "other"),
            ("feature: close but not standard", "other"),
            ("no colon here", "other"),
            ("", "other"),
            // Edge cases
            ("feat:no space after colon", "feat"),
            ("feat  : extra spaces", "feat"),
            ("feat(scope)(weird): nested parens", "feat"),
        ];

        for (message, expected) in cases {
            assert_eq!(
                extract_commit_type(message),
                expected,
                "unexpected type for message {message:?}"
            );
        }
    }
}