//! features_cli/file_scanner.rs
//!
//! Recursively scans a directory tree for feature directories and assembles
//! `Feature` records: metadata, git change history, decision documents,
//! size statistics, and inter-feature dependencies.
1use anyhow::{Context, Result};
2use git2::Repository;
3use std::collections::HashMap;
4use std::fs;
5use std::path::Path;
6
7use crate::dependency_resolver::{
8    build_file_to_feature_map, collect_feature_info, resolve_feature_dependencies,
9};
10use crate::feature_metadata_detector::{self, FeatureMetadataMap};
11use crate::features_toml_parser::{find_features_toml, read_features_toml};
12use crate::git_helper::get_all_commits_by_path;
13use crate::import_detector::{ImportStatement, build_file_map, scan_file_for_imports};
14use crate::models::{Change, Feature, Stats};
15use crate::readme_parser::read_readme_info;
16
/// Returns true when the final path component names a documentation folder.
fn is_documentation_directory(dir_path: &Path) -> bool {
    // Compare the last component (case-insensitively) against the set of
    // conventional documentation folder names.
    match dir_path.file_name().and_then(|name| name.to_str()) {
        Some(name) => {
            let lowered = name.to_lowercase();
            matches!(lowered.as_str(), "docs" | "__docs__" | ".docs")
        }
        // No final component (e.g. "/") — cannot be a docs folder.
        None => false,
    }
}
28
29fn is_inside_documentation_directory(dir_path: &Path) -> bool {
30    // Check if any parent directory is a documentation directory
31    for ancestor in dir_path.ancestors().skip(1) {
32        if is_documentation_directory(ancestor) {
33            return true;
34        }
35    }
36    false
37}
38
/// Returns true when the immediate parent directory is literally named "features".
fn is_direct_subfolder_of_features(dir_path: &Path) -> bool {
    dir_path
        .parent()
        .and_then(|parent| parent.file_name())
        .and_then(|name| name.to_str())
        .map_or(false, |parent_name| parent_name == "features")
}
47
/// Locates a README file inside `dir_path`.
///
/// Candidates are probed in priority order, so `README.md` wins over
/// `README.mdx` when both exist. Returns `None` when neither is present.
fn find_readme_file(dir_path: &Path) -> Option<std::path::PathBuf> {
    ["README.md", "README.mdx"]
        .iter()
        .map(|candidate| dir_path.join(candidate))
        .find(|candidate_path| candidate_path.exists())
}
60
61/// Check if a directory has a README with `feature: true` in front matter
62fn has_feature_flag_in_readme(dir_path: &Path) -> bool {
63    if let Some(readme_path) = find_readme_file(dir_path)
64        && let Ok(content) = fs::read_to_string(&readme_path)
65    {
66        // Check if content starts with YAML front matter (---)
67        if let Some(stripped) = content.strip_prefix("---\n")
68            && let Some(end_pos) = stripped.find("\n---\n")
69        {
70            let yaml_content = &stripped[..end_pos];
71
72            // Parse YAML front matter
73            if let Ok(yaml_value) = serde_yaml::from_str::<serde_yaml::Value>(yaml_content)
74                && let Some(mapping) = yaml_value.as_mapping()
75            {
76                // Check for feature: true
77                if let Some(feature_value) =
78                    mapping.get(serde_yaml::Value::String("feature".to_string()))
79                {
80                    return feature_value.as_bool() == Some(true);
81                }
82            }
83        }
84    }
85    false
86}
87
88/// Check if a directory should be treated as a feature
89fn is_feature_directory(dir_path: &Path) -> bool {
90    // Skip documentation directories
91    if is_documentation_directory(dir_path) || is_inside_documentation_directory(dir_path) {
92        return false;
93    }
94
95    // Check if it's a direct subfolder of "features" (existing behavior)
96    if is_direct_subfolder_of_features(dir_path) {
97        return true;
98    }
99
100    // Check if the directory has a README with feature: true
101    has_feature_flag_in_readme(dir_path)
102}
103
104pub fn list_files_recursive(dir: &Path) -> Result<Vec<Feature>> {
105    // Scan entire base_path for feature metadata once
106    let feature_metadata =
107        feature_metadata_detector::scan_directory_for_feature_metadata(dir).unwrap_or_default();
108
109    // First pass: build feature structure without dependencies
110    let mut features = list_files_recursive_impl(dir, dir, None, None, &feature_metadata)?;
111
112    // Second pass: scan for imports and resolve dependencies
113    populate_dependencies(&mut features, dir)?;
114
115    Ok(features)
116}
117
118pub fn list_files_recursive_with_changes(dir: &Path) -> Result<Vec<Feature>> {
119    // Get all commits once at the beginning for efficiency
120    let all_commits = get_all_commits_by_path(dir).unwrap_or_default();
121    // Scan entire base_path for feature metadata once
122    let feature_metadata =
123        feature_metadata_detector::scan_directory_for_feature_metadata(dir).unwrap_or_default();
124
125    // First pass: build feature structure without dependencies
126    let mut features =
127        list_files_recursive_impl(dir, dir, Some(&all_commits), None, &feature_metadata)?;
128
129    // Second pass: scan for imports and resolve dependencies
130    populate_dependencies(&mut features, dir)?;
131
132    Ok(features)
133}
134
135/// Populate dependencies for all features by scanning imports
136fn populate_dependencies(features: &mut [Feature], base_path: &Path) -> Result<()> {
137    // Build file map for quick path resolution
138    let file_map = build_file_map(base_path);
139
140    // Collect all feature info (flat list with paths)
141    let mut feature_info_list = Vec::new();
142    collect_feature_info(features, None, &mut feature_info_list);
143
144    // Build file-to-feature mapping
145    let file_to_feature_map = build_file_to_feature_map(&feature_info_list, base_path);
146
147    // Build feature path to name mapping (path is the unique identifier)
148    let mut feature_path_to_name_map = HashMap::new();
149    for info in &feature_info_list {
150        feature_path_to_name_map.insert(info.path.to_string_lossy().to_string(), info.name.clone());
151    }
152
153    // Scan all files in each feature for imports
154    let mut feature_imports: HashMap<String, Vec<ImportStatement>> = HashMap::new();
155
156    for feature_info in &feature_info_list {
157        let feature_path = base_path.join(&feature_info.path);
158        let imports = scan_feature_directory_for_imports(&feature_path);
159        feature_imports.insert(feature_info.name.clone(), imports);
160    }
161
162    // Now populate dependencies in the feature tree
163    populate_dependencies_recursive(
164        features,
165        base_path,
166        &feature_imports,
167        &file_to_feature_map,
168        &feature_path_to_name_map,
169        &file_map,
170    );
171
172    Ok(())
173}
174
175/// Scan a feature directory for all import statements
176fn scan_feature_directory_for_imports(feature_path: &Path) -> Vec<ImportStatement> {
177    let mut all_imports = Vec::new();
178
179    if let Ok(entries) = fs::read_dir(feature_path) {
180        for entry in entries.flatten() {
181            let path = entry.path();
182
183            // Skip documentation directories
184            if is_documentation_directory(&path) {
185                continue;
186            }
187
188            if path.is_file() {
189                if let Ok(imports) = scan_file_for_imports(&path) {
190                    all_imports.extend(imports);
191                }
192            } else if path.is_dir() {
193                // Skip 'features' subdirectory (contains nested features)
194                let dir_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
195                if dir_name == "features" {
196                    continue;
197                }
198
199                // Recursively scan subdirectories (but not nested features with readme flag)
200                if !has_feature_flag_in_readme(&path) {
201                    let nested_imports = scan_feature_directory_for_imports(&path);
202                    all_imports.extend(nested_imports);
203                }
204            }
205        }
206    }
207
208    all_imports
209}
210
211/// Recursively populate dependencies in the feature tree
212fn populate_dependencies_recursive(
213    features: &mut [Feature],
214    base_path: &Path,
215    feature_imports: &HashMap<String, Vec<ImportStatement>>,
216    file_to_feature_map: &HashMap<std::path::PathBuf, String>,
217    feature_path_to_name_map: &HashMap<String, String>,
218    file_map: &HashMap<String, std::path::PathBuf>,
219) {
220    for feature in features {
221        // Get imports for this feature
222        if let Some(imports) = feature_imports.get(&feature.name) {
223            let feature_path = std::path::PathBuf::from(&feature.path);
224
225            // Resolve dependencies
226            let dependencies = resolve_feature_dependencies(
227                &feature.name,
228                &feature_path,
229                base_path,
230                imports,
231                file_to_feature_map,
232                feature_path_to_name_map,
233                file_map,
234            );
235
236            feature.dependencies = dependencies;
237        }
238
239        // Recursively process nested features
240        if !feature.features.is_empty() {
241            populate_dependencies_recursive(
242                &mut feature.features,
243                base_path,
244                feature_imports,
245                file_to_feature_map,
246                feature_path_to_name_map,
247                file_map,
248            );
249        }
250    }
251}
252
253fn read_decision_files(feature_path: &Path) -> Result<Vec<String>> {
254    let mut decisions = Vec::new();
255
256    // Check both "decision" and "decisions" folder names
257    let decision_paths = [
258        feature_path.join(".docs").join("decisions"),
259        feature_path.join("__docs__").join("decisions"),
260    ];
261
262    for decisions_dir in &decision_paths {
263        if decisions_dir.exists() && decisions_dir.is_dir() {
264            let entries = fs::read_dir(decisions_dir).with_context(|| {
265                format!(
266                    "could not read decisions directory `{}`",
267                    decisions_dir.display()
268                )
269            })?;
270
271            for entry in entries {
272                let entry = entry?;
273                let path = entry.path();
274
275                // Skip README.md files and only process .md files
276                if path.is_file()
277                    && let Some(file_name) = path.file_name()
278                {
279                    let file_name_str = file_name.to_string_lossy();
280                    if file_name_str.ends_with(".md") && file_name_str != "README.md" {
281                        let content = fs::read_to_string(&path).with_context(|| {
282                            format!("could not read decision file `{}`", path.display())
283                        })?;
284                        decisions.push(content);
285                    }
286                }
287            }
288            break; // If we found one of the directories, don't check the other
289        }
290    }
291
292    Ok(decisions)
293}
294
295/// Count the number of files in a feature directory (excluding documentation)
296fn count_files(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
297    let mut file_count = 0;
298
299    if let Ok(entries) = fs::read_dir(feature_path) {
300        for entry in entries.flatten() {
301            let path = entry.path();
302            let path_str = path.to_string_lossy().to_string();
303
304            // Skip documentation directories
305            if is_documentation_directory(&path) {
306                continue;
307            }
308
309            // Skip nested feature directories
310            if nested_feature_paths
311                .iter()
312                .any(|nfp| path_str.starts_with(nfp))
313            {
314                continue;
315            }
316
317            if path.is_file() {
318                file_count += 1;
319            } else if path.is_dir() {
320                // Recursively count files in subdirectories
321                file_count += count_files(&path, nested_feature_paths);
322            }
323        }
324    }
325
326    file_count
327}
328
329/// Count the total number of lines in all files in a feature directory (excluding documentation)
330fn count_lines(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
331    let mut line_count = 0;
332
333    if let Ok(entries) = fs::read_dir(feature_path) {
334        for entry in entries.flatten() {
335            let path = entry.path();
336            let path_str = path.to_string_lossy().to_string();
337
338            // Skip documentation directories
339            if is_documentation_directory(&path) {
340                continue;
341            }
342
343            // Skip nested feature directories
344            if nested_feature_paths
345                .iter()
346                .any(|nfp| path_str.starts_with(nfp))
347            {
348                continue;
349            }
350
351            if path.is_file() {
352                // Try to read the file and count lines
353                if let Ok(content) = fs::read_to_string(&path) {
354                    line_count += content.lines().count();
355                }
356            } else if path.is_dir() {
357                // Recursively count lines in subdirectories
358                line_count += count_lines(&path, nested_feature_paths);
359            }
360        }
361    }
362
363    line_count
364}
365
366/// Count the total number of TODO comments in all files in a feature directory (excluding documentation)
367fn count_todos(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
368    let mut todo_count = 0;
369
370    if let Ok(entries) = fs::read_dir(feature_path) {
371        for entry in entries.flatten() {
372            let path = entry.path();
373            let path_str = path.to_string_lossy().to_string();
374
375            // Skip documentation directories
376            if is_documentation_directory(&path) {
377                continue;
378            }
379
380            // Skip nested feature directories
381            if nested_feature_paths
382                .iter()
383                .any(|nfp| path_str.starts_with(nfp))
384            {
385                continue;
386            }
387
388            if path.is_file() {
389                // Try to read the file and count TODO comments
390                if let Ok(content) = fs::read_to_string(&path) {
391                    for line in content.lines() {
392                        // Look for TODO in comments (case-insensitive)
393                        let line_upper = line.to_uppercase();
394                        if line_upper.contains("TODO") {
395                            todo_count += 1;
396                        }
397                    }
398                }
399            } else if path.is_dir() {
400                // Recursively count TODOs in subdirectories
401                todo_count += count_todos(&path, nested_feature_paths);
402            }
403        }
404    }
405
406    todo_count
407}
408
/// Get the paths affected by a specific commit
///
/// Returns an empty list whenever the hash cannot be parsed or any git
/// lookup fails — callers treat "unknown" as "touched nothing".
fn get_commit_affected_paths(repo: &Repository, commit_hash: &str) -> Vec<String> {
    let Ok(oid) = git2::Oid::from_str(commit_hash) else {
        return Vec::new();
    };

    let Ok(commit) = repo.find_commit(oid) else {
        return Vec::new();
    };

    let mut paths = Vec::new();

    // For the first commit (no parents), get all files in the tree
    if commit.parent_count() == 0 {
        if let Ok(tree) = commit.tree() {
            collect_all_tree_paths(repo, &tree, "", &mut paths);
        }
        return paths;
    }

    // For commits with parents, check the diff
    // NOTE(review): only parent(0) is consulted, so merge commits are diffed
    // against the first parent only — confirm that is intended.
    let Ok(tree) = commit.tree() else {
        return Vec::new();
    };

    let Ok(parent) = commit.parent(0) else {
        return Vec::new();
    };

    let Ok(parent_tree) = parent.tree() else {
        return Vec::new();
    };

    if let Ok(diff) = repo.diff_tree_to_tree(Some(&parent_tree), Some(&tree), None) {
        let _ = diff.foreach(
            &mut |delta, _| {
                // The new-side path of every delta is recorded unconditionally.
                if let Some(path) = delta.new_file().path()
                    && let Some(path_str) = path.to_str()
                {
                    paths.push(path_str.to_string());
                }
                // The old-side path is added only if not already present
                // (covers renames/deletes without duplicating plain edits).
                if let Some(path) = delta.old_file().path()
                    && let Some(path_str) = path.to_str()
                    && !paths.contains(&path_str.to_string())
                {
                    paths.push(path_str.to_string());
                }
                // Returning true keeps the foreach iteration going.
                true
            },
            None,
            None,
            None,
        );
    }

    paths
}
466
467/// Collect all file paths in a tree (helper for get_commit_affected_paths)
468fn collect_all_tree_paths(
469    repo: &Repository,
470    tree: &git2::Tree,
471    prefix: &str,
472    paths: &mut Vec<String>,
473) {
474    for entry in tree.iter() {
475        if let Some(name) = entry.name() {
476            let path = if prefix.is_empty() {
477                name.to_string()
478            } else {
479                format!("{}/{}", prefix, name)
480            };
481
482            paths.push(path.clone());
483
484            if entry.kind() == Some(git2::ObjectType::Tree)
485                && let Ok(obj) = entry.to_object(repo)
486                && let Ok(subtree) = obj.peel_to_tree()
487            {
488                collect_all_tree_paths(repo, &subtree, &path, paths);
489            }
490        }
491    }
492}
493
494/// Compute statistics from changes for a feature
495fn compute_stats_from_changes(
496    changes: &[Change],
497    feature_path: &Path,
498    nested_features: &[Feature],
499) -> Option<Stats> {
500    if changes.is_empty() {
501        return None;
502    }
503
504    // Collect paths of nested features to exclude from commit counts
505    let nested_feature_paths: Vec<String> =
506        nested_features.iter().map(|f| f.path.clone()).collect();
507
508    // Get repository to check commit details
509    let repo = Repository::discover(feature_path).ok();
510
511    // Get the feature's relative path from repo root
512    let feature_relative_path = if let Some(ref r) = repo {
513        if let Ok(canonical_path) = std::fs::canonicalize(feature_path) {
514            if let Some(workdir) = r.workdir() {
515                canonical_path
516                    .strip_prefix(workdir)
517                    .ok()
518                    .map(|p| p.to_string_lossy().to_string())
519            } else {
520                None
521            }
522        } else {
523            None
524        }
525    } else {
526        None
527    };
528
529    // Filter changes to only include those that affect files in this feature
530    // (not exclusively in nested features)
531    let filtered_changes: Vec<&Change> = changes
532        .iter()
533        .filter(|change| {
534            // If we don't have repo access, include all changes
535            let Some(ref r) = repo else {
536                return true;
537            };
538
539            let Some(ref feature_rel_path) = feature_relative_path else {
540                return true;
541            };
542
543            // Get the files affected by this commit
544            let affected_files = get_commit_affected_paths(r, &change.hash);
545
546            // Check if any affected file is in this feature but not in a nested feature
547            affected_files.iter().any(|file_path| {
548                // File must be in this feature
549                let in_feature = file_path.starts_with(feature_rel_path);
550
551                // File must not be exclusively in a nested feature
552                let in_nested = nested_feature_paths.iter().any(|nested_path| {
553                    // Convert nested absolute path to relative path
554                    if let Ok(nested_canonical) = std::fs::canonicalize(nested_path)
555                        && let Some(workdir) = r.workdir()
556                        && let Ok(nested_rel) = nested_canonical.strip_prefix(workdir)
557                    {
558                        let nested_rel_str = nested_rel.to_string_lossy();
559                        return file_path.starts_with(nested_rel_str.as_ref());
560                    }
561                    false
562                });
563
564                in_feature && !in_nested
565            })
566        })
567        .collect();
568
569    let mut commits = HashMap::new();
570
571    // Add total commit count
572    commits.insert(
573        "total_commits".to_string(),
574        serde_json::json!(filtered_changes.len()),
575    );
576
577    // Count commits by author
578    let mut authors_count: HashMap<String, usize> = HashMap::new();
579    for change in &filtered_changes {
580        *authors_count.entry(change.author_name.clone()).or_insert(0) += 1;
581    }
582    commits.insert(
583        "authors_count".to_string(),
584        serde_json::json!(authors_count),
585    );
586
587    // Count commits by conventional commit type
588    let mut count_by_type: HashMap<String, usize> = HashMap::new();
589    for change in &filtered_changes {
590        let commit_type = extract_commit_type(&change.title);
591        *count_by_type.entry(commit_type).or_insert(0) += 1;
592    }
593    commits.insert(
594        "count_by_type".to_string(),
595        serde_json::json!(count_by_type),
596    );
597
598    // Get first and last commit dates
599    if let Some(first) = filtered_changes.first() {
600        commits.insert(
601            "first_commit_date".to_string(),
602            serde_json::json!(first.date.clone()),
603        );
604    }
605    if let Some(last) = filtered_changes.last() {
606        commits.insert(
607            "last_commit_date".to_string(),
608            serde_json::json!(last.date.clone()),
609        );
610    }
611
612    // Count files and lines in the feature directory (excluding nested features)
613    let files_count = count_files(feature_path, &nested_feature_paths);
614    let lines_count = count_lines(feature_path, &nested_feature_paths);
615    let todos_count = count_todos(feature_path, &nested_feature_paths);
616
617    Some(Stats {
618        files_count: Some(files_count),
619        lines_count: Some(lines_count),
620        todos_count: Some(todos_count),
621        commits,
622        coverage: None,
623    })
624}
625
/// Extract the commit type from a conventional commit title
///
/// Recognizes `type: subject`, `type(scope): subject`, and the
/// breaking-change forms `type!: subject` / `type(scope)!: subject`.
/// Unknown types and non-conventional titles map to `"other"`.
fn extract_commit_type(title: &str) -> String {
    // Common conventional commit types
    let known_types = [
        "feat", "fix", "docs", "style", "refactor", "perf", "test", "build", "ci", "chore",
        "revert",
    ];

    // Check if the title follows conventional commit format (type: description or type(scope): description)
    if let Some(colon_pos) = title.find(':') {
        let prefix = &title[..colon_pos];

        // Remove scope if present (e.g., "feat(auth)" -> "feat")
        let type_part = if let Some(paren_pos) = prefix.find('(') {
            &prefix[..paren_pos]
        } else {
            prefix
        };

        // Normalize: trim whitespace, strip a trailing '!' (breaking-change
        // marker, e.g. "feat!:" -> "feat"), and lowercase.
        // Fix: previously "feat!: ..." was misclassified as "other" because
        // the '!' was never removed ("feat(scope)!:" only worked by accident).
        let type_part = type_part.trim().trim_end_matches('!').to_lowercase();

        // Check if it's a known conventional commit type
        if known_types.contains(&type_part.as_str()) {
            return type_part;
        }
    }

    // If not a conventional commit, return "other"
    "other".to_string()
}
656
/// Build a single `Feature` record for the directory at `path`.
///
/// Metadata comes from `FEATURES.toml` when present, otherwise from the
/// README. Nested features are discovered both under a `features/`
/// subdirectory and in any subdirectory whose README sets `feature: true`;
/// an empty owner is inherited from `parent_owner`. Dependencies are left
/// empty here and filled in by the second pass (`populate_dependencies`).
fn process_feature_directory(
    path: &Path,
    base_path: &Path,
    name: &str,
    changes_map: Option<&HashMap<String, Vec<Change>>>,
    parent_owner: Option<&str>,
    feature_metadata_map: &FeatureMetadataMap,
) -> Result<Feature> {
    // First try to find and read FEATURES.toml file
    let (title, owner, description, mut meta) = if let Some(toml_path) = find_features_toml(path) {
        if let Ok(toml_data) = read_features_toml(&toml_path) {
            (
                toml_data.name,
                toml_data.owner.unwrap_or_default(),
                toml_data.description.unwrap_or_default(),
                toml_data.meta,
            )
        } else {
            // Unreadable/invalid TOML degrades to empty metadata.
            (None, String::new(), String::new(), HashMap::new())
        }
    } else {
        // Fall back to README file if FEATURES.toml not found
        let readme_info = if let Some(readme_path) = find_readme_file(path) {
            read_readme_info(&readme_path)?
        } else {
            use crate::readme_parser::ReadmeInfo;
            ReadmeInfo {
                title: None,
                owner: "".to_string(),
                description: "".to_string(),
                meta: std::collections::HashMap::new(),
            }
        };
        (
            readme_info.title,
            readme_info.owner,
            readme_info.description,
            readme_info.meta,
        )
    };

    // Remove the 'feature' key from meta if it exists (it's redundant since we know it's a feature)
    meta.remove("feature");

    // Get the relative path to this feature directory for metadata lookup
    let relative_path = path
        .strip_prefix(base_path)
        .unwrap_or(path)
        .to_string_lossy()
        .to_string();

    // Check if this feature has any metadata from the global scan (matched by feature path)
    if let Some(metadata_map) = feature_metadata_map.get(&relative_path) {
        // Iterate through each metadata key (e.g., "feature-flag", "feature-experiment")
        for (metadata_key, flags) in metadata_map {
            // Convert Vec<HashMap<String, String>> to JSON array
            let flags_json: Vec<serde_json::Value> = flags
                .iter()
                .map(|flag_map| {
                    let json_map: serde_json::Map<String, serde_json::Value> = flag_map
                        .iter()
                        .map(|(k, v)| (k.clone(), serde_json::Value::String(v.clone())))
                        .collect();
                    serde_json::Value::Object(json_map)
                })
                .collect();

            // Check if this metadata key already exists, append if it does
            meta.entry(metadata_key.clone())
                .and_modify(|existing| {
                    if let serde_json::Value::Array(arr) = existing {
                        arr.extend(flags_json.clone());
                    }
                })
                .or_insert_with(|| serde_json::Value::Array(flags_json));
        }
    }

    let changes = if let Some(map) = changes_map {
        // Convert the absolute path to a repo-relative path and look up changes
        get_changes_for_path(path, map).unwrap_or_default()
    } else {
        Vec::new()
    };

    // Always include decisions regardless of include_changes flag
    let decisions = read_decision_files(path).unwrap_or_default();

    // Determine the actual owner and whether it's inherited
    let (actual_owner, is_owner_inherited) = if owner.is_empty() {
        if let Some(parent) = parent_owner {
            (parent.to_string(), true)
        } else {
            ("".to_string(), false)
        }
    } else {
        (owner.clone(), false)
    };

    // Check if this feature has nested features in a 'features' subdirectory
    let nested_features_path = path.join("features");
    let mut nested_features = if nested_features_path.exists() && nested_features_path.is_dir() {
        list_files_recursive_impl(
            &nested_features_path,
            base_path,
            changes_map,
            Some(&actual_owner),
            feature_metadata_map,
        )
        .unwrap_or_default()
    } else {
        Vec::new()
    };

    // Also check for nested features marked with feature: true in subdirectories
    let entries = fs::read_dir(path)
        .with_context(|| format!("could not read directory `{}`", path.display()))?;

    // Sort entries so nested-feature discovery order is deterministic.
    let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
    entries.sort_by_key(|entry| entry.path());

    for entry in entries {
        let entry_path = entry.path();
        let entry_name = entry_path.file_name().unwrap().to_string_lossy();

        if entry_path.is_dir()
            && entry_name != "features" // Don't process 'features' folder twice
            && !is_documentation_directory(&entry_path)
        {
            if has_feature_flag_in_readme(&entry_path) {
                // This directory is a feature itself
                let nested_feature = process_feature_directory(
                    &entry_path,
                    base_path,
                    &entry_name,
                    changes_map,
                    Some(&actual_owner),
                    feature_metadata_map,
                )?;
                nested_features.push(nested_feature);
            } else {
                // This directory is not a feature, but might contain features
                // Recursively search for features inside it
                let deeper_features = list_files_recursive_impl(
                    &entry_path,
                    base_path,
                    changes_map,
                    Some(&actual_owner),
                    feature_metadata_map,
                )?;
                nested_features.extend(deeper_features);
            }
        }
    }

    // Collect paths of nested features to exclude from file/line counts
    let nested_feature_paths: Vec<String> =
        nested_features.iter().map(|f| f.path.clone()).collect();

    // Always compute file, line, and TODO counts
    let files_count = count_files(path, &nested_feature_paths);
    let lines_count = count_lines(path, &nested_feature_paths);
    let todos_count = count_todos(path, &nested_feature_paths);

    // Compute stats from changes if available, otherwise create basic stats
    let stats =
        if let Some(change_stats) = compute_stats_from_changes(&changes, path, &nested_features) {
            // If we have change stats, they already include files/lines/todos counts
            Some(change_stats)
        } else {
            // No changes, but we still want to include file/line/todo counts
            Some(Stats {
                files_count: Some(files_count),
                lines_count: Some(lines_count),
                todos_count: Some(todos_count),
                commits: HashMap::new(),
                coverage: None,
            })
        };

    // Make path relative to base_path
    // NOTE(review): identical to `relative_path` computed earlier in this
    // function; could be reused instead of recomputed.
    let relative_path = path
        .strip_prefix(base_path)
        .unwrap_or(path)
        .to_string_lossy()
        .to_string();

    Ok(Feature {
        name: title.unwrap_or_else(|| name.to_string()),
        description,
        owner: actual_owner,
        is_owner_inherited,
        path: relative_path,
        features: nested_features,
        meta,
        changes,
        decisions,
        stats,
        dependencies: Vec::new(), // Will be populated in second pass
    })
}
858
859fn list_files_recursive_impl(
860    dir: &Path,
861    base_path: &Path,
862    changes_map: Option<&HashMap<String, Vec<Change>>>,
863    parent_owner: Option<&str>,
864    feature_metadata_map: &FeatureMetadataMap,
865) -> Result<Vec<Feature>> {
866    let entries = fs::read_dir(dir)
867        .with_context(|| format!("could not read directory `{}`", dir.display()))?;
868
869    let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
870    entries.sort_by_key(|entry| entry.path());
871
872    let mut features: Vec<Feature> = Vec::new();
873
874    for entry in entries {
875        let path = entry.path();
876        let name = path.file_name().unwrap().to_string_lossy();
877
878        if path.is_dir() {
879            if is_feature_directory(&path) {
880                let feature = process_feature_directory(
881                    &path,
882                    base_path,
883                    &name,
884                    changes_map,
885                    parent_owner,
886                    feature_metadata_map,
887                )?;
888                features.push(feature);
889            } else if !is_documentation_directory(&path)
890                && !is_inside_documentation_directory(&path)
891            {
892                // Recursively search for features in non-documentation subdirectories
893                let new_features = list_files_recursive_impl(
894                    &path,
895                    base_path,
896                    changes_map,
897                    parent_owner,
898                    feature_metadata_map,
899                )?;
900                features.extend(new_features);
901            }
902        }
903    }
904
905    Ok(features)
906}
907
908/// Get changes for a specific path from the pre-computed changes map
909fn get_changes_for_path(
910    path: &Path,
911    changes_map: &HashMap<String, Vec<Change>>,
912) -> Result<Vec<Change>> {
913    // Canonicalize the path
914    let canonical_path = std::fs::canonicalize(path)?;
915
916    // Find the repository and get the working directory
917    let repo = Repository::discover(path)?;
918    let repo_workdir = repo
919        .workdir()
920        .context("repository has no working directory")?;
921
922    // Convert to relative path from repo root
923    let relative_path = canonical_path
924        .strip_prefix(repo_workdir)
925        .context("path is not within repository")?;
926
927    let relative_path_str = relative_path.to_string_lossy().to_string();
928
929    // Look up the changes in the map
930    Ok(changes_map
931        .get(&relative_path_str)
932        .cloned()
933        .unwrap_or_default())
934}
935
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_extract_commit_type() {
        // Table-driven: (commit message, expected commit type).
        let cases: &[(&str, &str)] = &[
            // Standard conventional commit types
            ("feat: add new feature", "feat"),
            ("fix: resolve bug", "fix"),
            ("docs: update README", "docs"),
            ("style: format code", "style"),
            ("refactor: improve structure", "refactor"),
            ("perf: optimize performance", "perf"),
            ("test: add unit tests", "test"),
            ("build: update dependencies", "build"),
            ("ci: fix CI pipeline", "ci"),
            ("chore: update gitignore", "chore"),
            ("revert: undo previous commit", "revert"),
            // With scope
            ("feat(auth): add login", "feat"),
            ("fix(api): resolve endpoint issue", "fix"),
            ("docs(readme): update instructions", "docs"),
            // Case insensitivity
            ("FEAT: uppercase type", "feat"),
            ("Fix: mixed case", "fix"),
            ("DOCS: all caps", "docs"),
            // Non-conventional commits fall through to "other"
            ("random commit message", "other"),
            ("update: not conventional", "other"),
            ("feature: close but not standard", "other"),
            ("no colon here", "other"),
            ("", "other"),
            // Edge cases around the colon and scope parentheses
            ("feat:no space after colon", "feat"),
            ("feat  : extra spaces", "feat"),
            ("feat(scope)(weird): nested parens", "feat"),
        ];

        for (message, expected) in cases {
            assert_eq!(
                extract_commit_type(message),
                *expected,
                "unexpected commit type for message {message:?}"
            );
        }
    }
}