// features_cli/file_scanner.rs

1use anyhow::{Context, Result};
2use git2::Repository;
3use std::collections::HashMap;
4use std::fs;
5use std::path::Path;
6
7use crate::feature_metadata_detector::{self, FeatureMetadataMap};
8use crate::git_helper::get_all_commits_by_path;
9use crate::models::{Change, Feature, Stats};
10use crate::readme_parser::read_readme_info;
11
/// Returns true when `dir_path`'s final component is a documentation folder.
///
/// The check is case-insensitive and recognizes `docs`, `__docs__`, and
/// `.docs`. A path with no final component (e.g. `""` or `/`) is never a
/// documentation directory.
fn is_documentation_directory(dir_path: &Path) -> bool {
    // Common documentation directory names.
    const DOC_DIRS: [&str; 3] = ["docs", "__docs__", ".docs"];

    dir_path
        .file_name()
        .and_then(|name| name.to_str())
        .is_some_and(|name| DOC_DIRS.contains(&name.to_lowercase().as_str()))
}
23
24fn is_inside_documentation_directory(dir_path: &Path) -> bool {
25    // Check if any parent directory is a documentation directory
26    for ancestor in dir_path.ancestors().skip(1) {
27        if is_documentation_directory(ancestor) {
28            return true;
29        }
30    }
31    false
32}
33
/// Returns true when `dir_path`'s immediate parent directory is named
/// exactly `features` (the classic feature-folder layout).
fn is_direct_subfolder_of_features(dir_path: &Path) -> bool {
    dir_path
        .parent()
        .and_then(|parent| parent.file_name())
        .and_then(|name| name.to_str())
        == Some("features")
}
42
/// Locates the README inside `dir_path`, preferring `README.md` over
/// `README.mdx`. Returns `None` when neither file exists.
fn find_readme_file(dir_path: &Path) -> Option<std::path::PathBuf> {
    // Order matters: `README.md` wins when both candidates are present.
    ["README.md", "README.mdx"]
        .iter()
        .map(|candidate| dir_path.join(candidate))
        .find(|candidate_path| candidate_path.exists())
}
55
56/// Check if a directory has a README with `feature: true` in front matter
57fn has_feature_flag_in_readme(dir_path: &Path) -> bool {
58    if let Some(readme_path) = find_readme_file(dir_path)
59        && let Ok(content) = fs::read_to_string(&readme_path)
60    {
61        // Check if content starts with YAML front matter (---)
62        if let Some(stripped) = content.strip_prefix("---\n")
63            && let Some(end_pos) = stripped.find("\n---\n")
64        {
65            let yaml_content = &stripped[..end_pos];
66
67            // Parse YAML front matter
68            if let Ok(yaml_value) = serde_yaml::from_str::<serde_yaml::Value>(yaml_content)
69                && let Some(mapping) = yaml_value.as_mapping()
70            {
71                // Check for feature: true
72                if let Some(feature_value) =
73                    mapping.get(serde_yaml::Value::String("feature".to_string()))
74                {
75                    return feature_value.as_bool() == Some(true);
76                }
77            }
78        }
79    }
80    false
81}
82
83/// Check if a directory should be treated as a feature
84fn is_feature_directory(dir_path: &Path) -> bool {
85    // Skip documentation directories
86    if is_documentation_directory(dir_path) || is_inside_documentation_directory(dir_path) {
87        return false;
88    }
89
90    // Check if it's a direct subfolder of "features" (existing behavior)
91    if is_direct_subfolder_of_features(dir_path) {
92        return true;
93    }
94
95    // Check if the directory has a README with feature: true
96    has_feature_flag_in_readme(dir_path)
97}
98
/// Discover all features under `dir`, without collecting git history.
///
/// Feature metadata for the whole tree is scanned once up front
/// (best-effort: a scan failure falls back to an empty map) and shared
/// by the recursive walk. `dir` serves as both the scan root and the
/// base path that feature paths are made relative to.
pub fn list_files_recursive(dir: &Path) -> Result<Vec<Feature>> {
    // Scan entire base_path for feature metadata once
    let feature_metadata =
        feature_metadata_detector::scan_directory_for_feature_metadata(dir).unwrap_or_default();
    list_files_recursive_impl(dir, dir, None, None, &feature_metadata)
}
105
/// Discover all features under `dir`, attaching git change history.
///
/// Like [`list_files_recursive`], but first gathers every commit grouped
/// by path in a single pass (best-effort: a git failure falls back to an
/// empty map) so per-feature lookups during the walk are cheap.
pub fn list_files_recursive_with_changes(dir: &Path) -> Result<Vec<Feature>> {
    // Get all commits once at the beginning for efficiency
    let all_commits = get_all_commits_by_path(dir).unwrap_or_default();
    // Scan entire base_path for feature metadata once
    let feature_metadata =
        feature_metadata_detector::scan_directory_for_feature_metadata(dir).unwrap_or_default();
    list_files_recursive_impl(dir, dir, Some(&all_commits), None, &feature_metadata)
}
114
115fn read_decision_files(feature_path: &Path) -> Result<Vec<String>> {
116    let mut decisions = Vec::new();
117
118    // Check both "decision" and "decisions" folder names
119    let decision_paths = [
120        feature_path.join(".docs").join("decisions"),
121        feature_path.join("__docs__").join("decisions"),
122    ];
123
124    for decisions_dir in &decision_paths {
125        if decisions_dir.exists() && decisions_dir.is_dir() {
126            let entries = fs::read_dir(decisions_dir).with_context(|| {
127                format!(
128                    "could not read decisions directory `{}`",
129                    decisions_dir.display()
130                )
131            })?;
132
133            for entry in entries {
134                let entry = entry?;
135                let path = entry.path();
136
137                // Skip README.md files and only process .md files
138                if path.is_file()
139                    && let Some(file_name) = path.file_name()
140                {
141                    let file_name_str = file_name.to_string_lossy();
142                    if file_name_str.ends_with(".md") && file_name_str != "README.md" {
143                        let content = fs::read_to_string(&path).with_context(|| {
144                            format!("could not read decision file `{}`", path.display())
145                        })?;
146                        decisions.push(content);
147                    }
148                }
149            }
150            break; // If we found one of the directories, don't check the other
151        }
152    }
153
154    Ok(decisions)
155}
156
157/// Count the number of files in a feature directory (excluding documentation)
158fn count_files(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
159    let mut file_count = 0;
160
161    if let Ok(entries) = fs::read_dir(feature_path) {
162        for entry in entries.flatten() {
163            let path = entry.path();
164            let path_str = path.to_string_lossy().to_string();
165
166            // Skip documentation directories
167            if is_documentation_directory(&path) {
168                continue;
169            }
170
171            // Skip nested feature directories
172            if nested_feature_paths
173                .iter()
174                .any(|nfp| path_str.starts_with(nfp))
175            {
176                continue;
177            }
178
179            if path.is_file() {
180                file_count += 1;
181            } else if path.is_dir() {
182                // Recursively count files in subdirectories
183                file_count += count_files(&path, nested_feature_paths);
184            }
185        }
186    }
187
188    file_count
189}
190
191/// Count the total number of lines in all files in a feature directory (excluding documentation)
192fn count_lines(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
193    let mut line_count = 0;
194
195    if let Ok(entries) = fs::read_dir(feature_path) {
196        for entry in entries.flatten() {
197            let path = entry.path();
198            let path_str = path.to_string_lossy().to_string();
199
200            // Skip documentation directories
201            if is_documentation_directory(&path) {
202                continue;
203            }
204
205            // Skip nested feature directories
206            if nested_feature_paths
207                .iter()
208                .any(|nfp| path_str.starts_with(nfp))
209            {
210                continue;
211            }
212
213            if path.is_file() {
214                // Try to read the file and count lines
215                if let Ok(content) = fs::read_to_string(&path) {
216                    line_count += content.lines().count();
217                }
218            } else if path.is_dir() {
219                // Recursively count lines in subdirectories
220                line_count += count_lines(&path, nested_feature_paths);
221            }
222        }
223    }
224
225    line_count
226}
227
228/// Count the total number of TODO comments in all files in a feature directory (excluding documentation)
229fn count_todos(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
230    let mut todo_count = 0;
231
232    if let Ok(entries) = fs::read_dir(feature_path) {
233        for entry in entries.flatten() {
234            let path = entry.path();
235            let path_str = path.to_string_lossy().to_string();
236
237            // Skip documentation directories
238            if is_documentation_directory(&path) {
239                continue;
240            }
241
242            // Skip nested feature directories
243            if nested_feature_paths
244                .iter()
245                .any(|nfp| path_str.starts_with(nfp))
246            {
247                continue;
248            }
249
250            if path.is_file() {
251                // Try to read the file and count TODO comments
252                if let Ok(content) = fs::read_to_string(&path) {
253                    for line in content.lines() {
254                        // Look for TODO in comments (case-insensitive)
255                        let line_upper = line.to_uppercase();
256                        if line_upper.contains("TODO") {
257                            todo_count += 1;
258                        }
259                    }
260                }
261            } else if path.is_dir() {
262                // Recursively count TODOs in subdirectories
263                todo_count += count_todos(&path, nested_feature_paths);
264            }
265        }
266    }
267
268    todo_count
269}
270
/// Get the paths affected by a specific commit
///
/// Returns the repo-relative paths touched by `commit_hash`, or an empty
/// vector if the hash is invalid or the commit/trees cannot be loaded.
/// For a root commit (no parents) every path in its tree is returned;
/// otherwise paths come from the diff against the first parent only
/// (additional parents of a merge commit are ignored).
fn get_commit_affected_paths(repo: &Repository, commit_hash: &str) -> Vec<String> {
    let Ok(oid) = git2::Oid::from_str(commit_hash) else {
        return Vec::new();
    };

    let Ok(commit) = repo.find_commit(oid) else {
        return Vec::new();
    };

    let mut paths = Vec::new();

    // For the first commit (no parents), get all files in the tree
    if commit.parent_count() == 0 {
        if let Ok(tree) = commit.tree() {
            collect_all_tree_paths(repo, &tree, "", &mut paths);
        }
        return paths;
    }

    // For commits with parents, check the diff
    let Ok(tree) = commit.tree() else {
        return Vec::new();
    };

    let Ok(parent) = commit.parent(0) else {
        return Vec::new();
    };

    let Ok(parent_tree) = parent.tree() else {
        return Vec::new();
    };

    if let Ok(diff) = repo.diff_tree_to_tree(Some(&parent_tree), Some(&tree), None) {
        let _ = diff.foreach(
            &mut |delta, _| {
                // Record the new-side path unconditionally; the old-side
                // path (differs for renames/deletes) is only added when not
                // already collected, so most deltas contribute one entry.
                if let Some(path) = delta.new_file().path()
                    && let Some(path_str) = path.to_str()
                {
                    paths.push(path_str.to_string());
                }
                if let Some(path) = delta.old_file().path()
                    && let Some(path_str) = path.to_str()
                    && !paths.contains(&path_str.to_string())
                {
                    paths.push(path_str.to_string());
                }
                true // keep iterating over remaining deltas
            },
            None,
            None,
            None,
        );
    }

    paths
}
328
329/// Collect all file paths in a tree (helper for get_commit_affected_paths)
330fn collect_all_tree_paths(
331    repo: &Repository,
332    tree: &git2::Tree,
333    prefix: &str,
334    paths: &mut Vec<String>,
335) {
336    for entry in tree.iter() {
337        if let Some(name) = entry.name() {
338            let path = if prefix.is_empty() {
339                name.to_string()
340            } else {
341                format!("{}/{}", prefix, name)
342            };
343
344            paths.push(path.clone());
345
346            if entry.kind() == Some(git2::ObjectType::Tree)
347                && let Ok(obj) = entry.to_object(repo)
348                && let Ok(subtree) = obj.peel_to_tree()
349            {
350                collect_all_tree_paths(repo, &subtree, &path, paths);
351            }
352        }
353    }
354}
355
/// Compute statistics from changes for a feature
///
/// Builds a `Stats` value combining git-derived commit statistics with
/// on-disk counts (files, lines, TODO lines) for `feature_path`. Commits
/// are first filtered to those touching at least one file that belongs to
/// this feature but not exclusively to one of its `nested_features`.
/// Returns `None` when `changes` is empty (no git data → no stats).
fn compute_stats_from_changes(
    changes: &[Change],
    feature_path: &Path,
    nested_features: &[Feature],
) -> Option<Stats> {
    if changes.is_empty() {
        return None;
    }

    // Collect paths of nested features to exclude from commit counts.
    // These are base-relative paths (see how `Feature.path` is built in
    // process_feature_directory via strip_prefix on the base path).
    let nested_feature_paths: Vec<String> =
        nested_features.iter().map(|f| f.path.clone()).collect();

    // Get repository to check commit details. If discovery fails, the
    // filter below includes every change unfiltered.
    let repo = Repository::discover(feature_path).ok();

    // Get the feature's relative path from repo root
    let feature_relative_path = if let Some(ref r) = repo {
        if let Ok(canonical_path) = std::fs::canonicalize(feature_path) {
            if let Some(workdir) = r.workdir() {
                canonical_path
                    .strip_prefix(workdir)
                    .ok()
                    .map(|p| p.to_string_lossy().to_string())
            } else {
                None
            }
        } else {
            None
        }
    } else {
        None
    };

    // Filter changes to only include those that affect files in this feature
    // (not exclusively in nested features)
    let filtered_changes: Vec<&Change> = changes
        .iter()
        .filter(|change| {
            // If we don't have repo access, include all changes
            let Some(ref r) = repo else {
                return true;
            };

            let Some(ref feature_rel_path) = feature_relative_path else {
                return true;
            };

            // Get the files affected by this commit
            let affected_files = get_commit_affected_paths(r, &change.hash);

            // Check if any affected file is in this feature but not in a nested feature
            affected_files.iter().any(|file_path| {
                // File must be in this feature
                let in_feature = file_path.starts_with(feature_rel_path);

                // File must not be exclusively in a nested feature
                let in_nested = nested_feature_paths.iter().any(|nested_path| {
                    // Convert nested absolute path to relative path
                    // NOTE(review): `nested_path` is base-relative, so this
                    // canonicalize resolves against the current working
                    // directory — confirm callers run from the base dir,
                    // otherwise nested commits are never excluded here.
                    if let Ok(nested_canonical) = std::fs::canonicalize(nested_path)
                        && let Some(workdir) = r.workdir()
                        && let Ok(nested_rel) = nested_canonical.strip_prefix(workdir)
                    {
                        let nested_rel_str = nested_rel.to_string_lossy();
                        return file_path.starts_with(nested_rel_str.as_ref());
                    }
                    false
                });

                in_feature && !in_nested
            })
        })
        .collect();

    // Commit statistics are accumulated as loose JSON values keyed by name.
    let mut commits = HashMap::new();

    // Add total commit count
    commits.insert(
        "total_commits".to_string(),
        serde_json::json!(filtered_changes.len()),
    );

    // Count commits by author
    let mut authors_count: HashMap<String, usize> = HashMap::new();
    for change in &filtered_changes {
        *authors_count.entry(change.author_name.clone()).or_insert(0) += 1;
    }
    commits.insert(
        "authors_count".to_string(),
        serde_json::json!(authors_count),
    );

    // Count commits by conventional commit type
    let mut count_by_type: HashMap<String, usize> = HashMap::new();
    for change in &filtered_changes {
        let commit_type = extract_commit_type(&change.title);
        *count_by_type.entry(commit_type).or_insert(0) += 1;
    }
    commits.insert(
        "count_by_type".to_string(),
        serde_json::json!(count_by_type),
    );

    // Get first and last commit dates
    // NOTE(review): assumes `changes` arrives in chronological order so
    // first()/last() are the earliest/latest commits — confirm the ordering
    // guaranteed by get_all_commits_by_path.
    if let Some(first) = filtered_changes.first() {
        commits.insert(
            "first_commit_date".to_string(),
            serde_json::json!(first.date.clone()),
        );
    }
    if let Some(last) = filtered_changes.last() {
        commits.insert(
            "last_commit_date".to_string(),
            serde_json::json!(last.date.clone()),
        );
    }

    // Count files and lines in the feature directory (excluding nested features)
    let files_count = count_files(feature_path, &nested_feature_paths);
    let lines_count = count_lines(feature_path, &nested_feature_paths);
    let todos_count = count_todos(feature_path, &nested_feature_paths);

    Some(Stats {
        files_count: Some(files_count),
        lines_count: Some(lines_count),
        todos_count: Some(todos_count),
        commits,
        coverage: None, // coverage is filled in elsewhere (if at all)
    })
}
487
/// Extract the commit type from a conventional commit title
///
/// Recognizes `type: description` and `type(scope): description`, matched
/// case-insensitively against the standard conventional-commit types. A
/// trailing `!` (the breaking-change marker, e.g. `feat!:` or
/// `feat(api)!:`) is accepted and stripped. Anything else — including
/// titles with no colon — is classified as "other".
fn extract_commit_type(title: &str) -> String {
    // Common conventional commit types
    const KNOWN_TYPES: [&str; 11] = [
        "feat", "fix", "docs", "style", "refactor", "perf", "test", "build", "ci", "chore",
        "revert",
    ];

    // Check if the title follows conventional commit format (type: description or type(scope): description)
    if let Some(colon_pos) = title.find(':') {
        let prefix = &title[..colon_pos];

        // Remove scope if present (e.g., "feat(auth)" -> "feat")
        let type_part = if let Some(paren_pos) = prefix.find('(') {
            &prefix[..paren_pos]
        } else {
            prefix
        };

        // Normalize: trim whitespace, drop the breaking-change `!` marker
        // (per the Conventional Commits spec), then lowercase.
        let type_part = type_part.trim().trim_end_matches('!').to_lowercase();

        // Check if it's a known conventional commit type
        if KNOWN_TYPES.contains(&type_part.as_str()) {
            return type_part;
        }
    }

    // If not a conventional commit, return "other"
    "other".to_string()
}
518
/// Build a `Feature` for a single feature directory.
///
/// Gathers, in order: README front matter (title/owner/description/meta,
/// with defaults when no README exists), feature metadata matched by
/// folder name from the global scan, git changes (when `changes_map` is
/// provided), decision documents, nested features (both the `features/`
/// subfolder convention and directories opting in via `feature: true`
/// front matter), and stats derived from the changes. When the README
/// declares no owner, ownership falls back to `parent_owner` and the
/// result is flagged as inherited. The returned `path` is relative to
/// `base_path`.
///
/// # Errors
/// Fails if the README cannot be parsed, the directory cannot be listed,
/// or building a nested feature fails.
fn process_feature_directory(
    path: &Path,
    base_path: &Path,
    name: &str,
    changes_map: Option<&HashMap<String, Vec<Change>>>,
    parent_owner: Option<&str>,
    feature_metadata_map: &FeatureMetadataMap,
) -> Result<Feature> {
    // Try to find and read README file, use defaults if not found
    let mut readme_info = if let Some(readme_path) = find_readme_file(path) {
        read_readme_info(&readme_path)?
    } else {
        use crate::readme_parser::ReadmeInfo;
        ReadmeInfo {
            title: None,
            owner: "".to_string(),
            description: "".to_string(),
            meta: std::collections::HashMap::new(),
        }
    };

    // Remove the 'feature' key from meta if it exists (it's redundant since we know it's a feature)
    readme_info.meta.remove("feature");

    // Get the folder name (the last component of the path)
    let folder_name = path.file_name().and_then(|n| n.to_str()).unwrap_or(name);

    // Check if this feature has any metadata from the global scan (matched by folder name)
    if let Some(metadata_map) = feature_metadata_map.get(folder_name) {
        // Iterate through each metadata key (e.g., "feature-flag", "feature-experiment")
        for (metadata_key, flags) in metadata_map {
            // Convert Vec<HashMap<String, String>> to JSON array
            let flags_json: Vec<serde_json::Value> = flags
                .iter()
                .map(|flag_map| {
                    let json_map: serde_json::Map<String, serde_json::Value> = flag_map
                        .iter()
                        .map(|(k, v)| (k.clone(), serde_json::Value::String(v.clone())))
                        .collect();
                    serde_json::Value::Object(json_map)
                })
                .collect();

            // Check if this metadata key already exists, append if it does
            readme_info
                .meta
                .entry(metadata_key.clone())
                .and_modify(|existing| {
                    // Only merges when the existing value is already an
                    // array; other value shapes are left untouched.
                    if let serde_json::Value::Array(arr) = existing {
                        arr.extend(flags_json.clone());
                    }
                })
                .or_insert_with(|| serde_json::Value::Array(flags_json));
        }
    }

    let changes = if let Some(map) = changes_map {
        // Convert the absolute path to a repo-relative path and look up changes
        get_changes_for_path(path, map).unwrap_or_default()
    } else {
        Vec::new()
    };

    // Always include decisions regardless of include_changes flag
    let decisions = read_decision_files(path).unwrap_or_default();

    // Determine the actual owner and whether it's inherited
    let (actual_owner, is_owner_inherited) = if readme_info.owner.is_empty() {
        if let Some(parent) = parent_owner {
            (parent.to_string(), true)
        } else {
            ("".to_string(), false)
        }
    } else {
        (readme_info.owner.clone(), false)
    };

    // Check if this feature has nested features in a 'features' subdirectory
    let nested_features_path = path.join("features");
    let mut nested_features = if nested_features_path.exists() && nested_features_path.is_dir() {
        list_files_recursive_impl(
            &nested_features_path,
            base_path,
            changes_map,
            Some(&actual_owner), // children inherit this feature's owner
            feature_metadata_map,
        )
        .unwrap_or_default()
    } else {
        Vec::new()
    };

    // Also check for nested features marked with feature: true in subdirectories
    let entries = fs::read_dir(path)
        .with_context(|| format!("could not read directory `{}`", path.display()))?;

    // Sort for deterministic traversal order.
    let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
    entries.sort_by_key(|entry| entry.path());

    for entry in entries {
        let entry_path = entry.path();
        let entry_name = entry_path.file_name().unwrap().to_string_lossy();

        if entry_path.is_dir()
            && entry_name != "features" // Don't process 'features' folder twice
            && !is_documentation_directory(&entry_path)
        {
            if has_feature_flag_in_readme(&entry_path) {
                // This directory is a feature itself
                let nested_feature = process_feature_directory(
                    &entry_path,
                    base_path,
                    &entry_name,
                    changes_map,
                    Some(&actual_owner),
                    feature_metadata_map,
                )?;
                nested_features.push(nested_feature);
            } else {
                // This directory is not a feature, but might contain features
                // Recursively search for features inside it
                let deeper_features = list_files_recursive_impl(
                    &entry_path,
                    base_path,
                    changes_map,
                    Some(&actual_owner),
                    feature_metadata_map,
                )?;
                nested_features.extend(deeper_features);
            }
        }
    }

    // Compute stats from changes if available
    let stats = compute_stats_from_changes(&changes, path, &nested_features);

    // Make path relative to base_path
    let relative_path = path
        .strip_prefix(base_path)
        .unwrap_or(path)
        .to_string_lossy()
        .to_string();

    Ok(Feature {
        name: readme_info.title.unwrap_or_else(|| name.to_string()),
        description: readme_info.description,
        owner: actual_owner,
        is_owner_inherited,
        path: relative_path,
        features: nested_features,
        meta: readme_info.meta,
        changes,
        decisions,
        stats,
    })
}
675
676fn list_files_recursive_impl(
677    dir: &Path,
678    base_path: &Path,
679    changes_map: Option<&HashMap<String, Vec<Change>>>,
680    parent_owner: Option<&str>,
681    feature_metadata_map: &FeatureMetadataMap,
682) -> Result<Vec<Feature>> {
683    let entries = fs::read_dir(dir)
684        .with_context(|| format!("could not read directory `{}`", dir.display()))?;
685
686    let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
687    entries.sort_by_key(|entry| entry.path());
688
689    let mut features: Vec<Feature> = Vec::new();
690
691    for entry in entries {
692        let path = entry.path();
693        let name = path.file_name().unwrap().to_string_lossy();
694
695        if path.is_dir() {
696            if is_feature_directory(&path) {
697                let feature = process_feature_directory(
698                    &path,
699                    base_path,
700                    &name,
701                    changes_map,
702                    parent_owner,
703                    feature_metadata_map,
704                )?;
705                features.push(feature);
706            } else if !is_documentation_directory(&path)
707                && !is_inside_documentation_directory(&path)
708            {
709                // Recursively search for features in non-documentation subdirectories
710                let new_features = list_files_recursive_impl(
711                    &path,
712                    base_path,
713                    changes_map,
714                    parent_owner,
715                    feature_metadata_map,
716                )?;
717                features.extend(new_features);
718            }
719        }
720    }
721
722    Ok(features)
723}
724
725/// Get changes for a specific path from the pre-computed changes map
726fn get_changes_for_path(
727    path: &Path,
728    changes_map: &HashMap<String, Vec<Change>>,
729) -> Result<Vec<Change>> {
730    // Canonicalize the path
731    let canonical_path = std::fs::canonicalize(path)?;
732
733    // Find the repository and get the working directory
734    let repo = Repository::discover(path)?;
735    let repo_workdir = repo
736        .workdir()
737        .context("repository has no working directory")?;
738
739    // Convert to relative path from repo root
740    let relative_path = canonical_path
741        .strip_prefix(repo_workdir)
742        .context("path is not within repository")?;
743
744    let relative_path_str = relative_path.to_string_lossy().to_string();
745
746    // Look up the changes in the map
747    Ok(changes_map
748        .get(&relative_path_str)
749        .cloned()
750        .unwrap_or_default())
751}
752
#[cfg(test)]
mod tests {
    use super::*;

    // Exercises `extract_commit_type` across conventional types, scoped
    // forms, case handling, non-conventional titles, and whitespace edges.
    #[test]
    fn test_extract_commit_type() {
        // Test standard conventional commit types
        assert_eq!(extract_commit_type("feat: add new feature"), "feat");
        assert_eq!(extract_commit_type("fix: resolve bug"), "fix");
        assert_eq!(extract_commit_type("docs: update README"), "docs");
        assert_eq!(extract_commit_type("style: format code"), "style");
        assert_eq!(
            extract_commit_type("refactor: improve structure"),
            "refactor"
        );
        assert_eq!(extract_commit_type("perf: optimize performance"), "perf");
        assert_eq!(extract_commit_type("test: add unit tests"), "test");
        assert_eq!(extract_commit_type("build: update dependencies"), "build");
        assert_eq!(extract_commit_type("ci: fix CI pipeline"), "ci");
        assert_eq!(extract_commit_type("chore: update gitignore"), "chore");
        assert_eq!(
            extract_commit_type("revert: undo previous commit"),
            "revert"
        );

        // Test with scope
        assert_eq!(extract_commit_type("feat(auth): add login"), "feat");
        assert_eq!(
            extract_commit_type("fix(api): resolve endpoint issue"),
            "fix"
        );
        assert_eq!(
            extract_commit_type("docs(readme): update instructions"),
            "docs"
        );

        // Test case insensitivity
        assert_eq!(extract_commit_type("FEAT: uppercase type"), "feat");
        assert_eq!(extract_commit_type("Fix: mixed case"), "fix");
        assert_eq!(extract_commit_type("DOCS: all caps"), "docs");

        // Test non-conventional commits (unknown types map to "other")
        assert_eq!(extract_commit_type("random commit message"), "other");
        assert_eq!(extract_commit_type("update: not conventional"), "other");
        assert_eq!(
            extract_commit_type("feature: close but not standard"),
            "other"
        );
        assert_eq!(extract_commit_type("no colon here"), "other");
        assert_eq!(extract_commit_type(""), "other");

        // Test edge cases
        assert_eq!(extract_commit_type("feat:no space after colon"), "feat");
        assert_eq!(extract_commit_type("feat  : extra spaces"), "feat");
        // Only the text before the first '(' is used as the type.
        assert_eq!(
            extract_commit_type("feat(scope)(weird): nested parens"),
            "feat"
        );
    }
}