1use anyhow::{Context, Result};
2use git2::Repository;
3use std::collections::HashMap;
4use std::fs;
5use std::path::Path;
6
7use crate::dependency_resolver::{
8 build_file_to_feature_map, collect_feature_info, resolve_feature_dependencies,
9};
10use crate::feature_metadata_detector::{self, FeatureMetadataMap};
11use crate::git_helper::get_all_commits_by_path;
12use crate::import_detector::{ImportStatement, build_file_map, scan_file_for_imports};
13use crate::models::{Change, Feature, Stats};
14use crate::readme_parser::read_readme_info;
15
/// Returns true when `dir_path`'s final component names a documentation
/// folder (`docs`, `__docs__`, or `.docs`, compared case-insensitively).
fn is_documentation_directory(dir_path: &Path) -> bool {
    let name = match dir_path.file_name().and_then(|n| n.to_str()) {
        Some(n) => n.to_lowercase(),
        // No final component (e.g. `/`): not a documentation directory.
        None => return false,
    };
    matches!(name.as_str(), "docs" | "__docs__" | ".docs")
}
27
28fn is_inside_documentation_directory(dir_path: &Path) -> bool {
29 for ancestor in dir_path.ancestors().skip(1) {
31 if is_documentation_directory(ancestor) {
32 return true;
33 }
34 }
35 false
36}
37
/// Returns true when the immediate parent directory is named exactly
/// `features` (case-sensitive).
fn is_direct_subfolder_of_features(dir_path: &Path) -> bool {
    dir_path
        .parent()
        .and_then(|parent| parent.file_name())
        .and_then(|name| name.to_str())
        .map_or(false, |parent_name| parent_name == "features")
}
46
/// Looks for a README in `dir_path`, preferring `README.md` over
/// `README.mdx`. Returns the first candidate that exists on disk,
/// or `None` when neither does.
fn find_readme_file(dir_path: &Path) -> Option<std::path::PathBuf> {
    ["README.md", "README.mdx"]
        .iter()
        .map(|candidate| dir_path.join(candidate))
        .find(|candidate_path| candidate_path.exists())
}
59
60fn has_feature_flag_in_readme(dir_path: &Path) -> bool {
62 if let Some(readme_path) = find_readme_file(dir_path)
63 && let Ok(content) = fs::read_to_string(&readme_path)
64 {
65 if let Some(stripped) = content.strip_prefix("---\n")
67 && let Some(end_pos) = stripped.find("\n---\n")
68 {
69 let yaml_content = &stripped[..end_pos];
70
71 if let Ok(yaml_value) = serde_yaml::from_str::<serde_yaml::Value>(yaml_content)
73 && let Some(mapping) = yaml_value.as_mapping()
74 {
75 if let Some(feature_value) =
77 mapping.get(serde_yaml::Value::String("feature".to_string()))
78 {
79 return feature_value.as_bool() == Some(true);
80 }
81 }
82 }
83 }
84 false
85}
86
87fn is_feature_directory(dir_path: &Path) -> bool {
89 if is_documentation_directory(dir_path) || is_inside_documentation_directory(dir_path) {
91 return false;
92 }
93
94 if is_direct_subfolder_of_features(dir_path) {
96 return true;
97 }
98
99 has_feature_flag_in_readme(dir_path)
101}
102
103pub fn list_files_recursive(dir: &Path) -> Result<Vec<Feature>> {
104 let feature_metadata =
106 feature_metadata_detector::scan_directory_for_feature_metadata(dir).unwrap_or_default();
107
108 let mut features = list_files_recursive_impl(dir, dir, None, None, &feature_metadata)?;
110
111 populate_dependencies(&mut features, dir)?;
113
114 Ok(features)
115}
116
117pub fn list_files_recursive_with_changes(dir: &Path) -> Result<Vec<Feature>> {
118 let all_commits = get_all_commits_by_path(dir).unwrap_or_default();
120 let feature_metadata =
122 feature_metadata_detector::scan_directory_for_feature_metadata(dir).unwrap_or_default();
123
124 let mut features =
126 list_files_recursive_impl(dir, dir, Some(&all_commits), None, &feature_metadata)?;
127
128 populate_dependencies(&mut features, dir)?;
130
131 Ok(features)
132}
133
134fn populate_dependencies(features: &mut [Feature], base_path: &Path) -> Result<()> {
136 let file_map = build_file_map(base_path);
138
139 let mut feature_info_list = Vec::new();
141 collect_feature_info(features, None, &mut feature_info_list);
142
143 let file_to_feature_map = build_file_to_feature_map(&feature_info_list, base_path);
145
146 let mut feature_path_to_name_map = HashMap::new();
148 for info in &feature_info_list {
149 feature_path_to_name_map.insert(info.path.to_string_lossy().to_string(), info.name.clone());
150 }
151
152 let mut feature_imports: HashMap<String, Vec<ImportStatement>> = HashMap::new();
154
155 for feature_info in &feature_info_list {
156 let feature_path = base_path.join(&feature_info.path);
157 let imports = scan_feature_directory_for_imports(&feature_path);
158 feature_imports.insert(feature_info.name.clone(), imports);
159 }
160
161 populate_dependencies_recursive(
163 features,
164 base_path,
165 &feature_imports,
166 &file_to_feature_map,
167 &feature_path_to_name_map,
168 &file_map,
169 );
170
171 Ok(())
172}
173
174fn scan_feature_directory_for_imports(feature_path: &Path) -> Vec<ImportStatement> {
176 let mut all_imports = Vec::new();
177
178 if let Ok(entries) = fs::read_dir(feature_path) {
179 for entry in entries.flatten() {
180 let path = entry.path();
181
182 if is_documentation_directory(&path) {
184 continue;
185 }
186
187 if path.is_file() {
188 if let Ok(imports) = scan_file_for_imports(&path) {
189 all_imports.extend(imports);
190 }
191 } else if path.is_dir() {
192 let dir_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
194 if dir_name == "features" {
195 continue;
196 }
197
198 if !has_feature_flag_in_readme(&path) {
200 let nested_imports = scan_feature_directory_for_imports(&path);
201 all_imports.extend(nested_imports);
202 }
203 }
204 }
205 }
206
207 all_imports
208}
209
210fn populate_dependencies_recursive(
212 features: &mut [Feature],
213 base_path: &Path,
214 feature_imports: &HashMap<String, Vec<ImportStatement>>,
215 file_to_feature_map: &HashMap<std::path::PathBuf, String>,
216 feature_path_to_name_map: &HashMap<String, String>,
217 file_map: &HashMap<String, std::path::PathBuf>,
218) {
219 for feature in features {
220 if let Some(imports) = feature_imports.get(&feature.name) {
222 let feature_path = std::path::PathBuf::from(&feature.path);
223
224 let dependencies = resolve_feature_dependencies(
226 &feature.name,
227 &feature_path,
228 base_path,
229 imports,
230 file_to_feature_map,
231 feature_path_to_name_map,
232 file_map,
233 );
234
235 feature.dependencies = dependencies;
236 }
237
238 if !feature.features.is_empty() {
240 populate_dependencies_recursive(
241 &mut feature.features,
242 base_path,
243 feature_imports,
244 file_to_feature_map,
245 feature_path_to_name_map,
246 file_map,
247 );
248 }
249 }
250}
251
252fn read_decision_files(feature_path: &Path) -> Result<Vec<String>> {
253 let mut decisions = Vec::new();
254
255 let decision_paths = [
257 feature_path.join(".docs").join("decisions"),
258 feature_path.join("__docs__").join("decisions"),
259 ];
260
261 for decisions_dir in &decision_paths {
262 if decisions_dir.exists() && decisions_dir.is_dir() {
263 let entries = fs::read_dir(decisions_dir).with_context(|| {
264 format!(
265 "could not read decisions directory `{}`",
266 decisions_dir.display()
267 )
268 })?;
269
270 for entry in entries {
271 let entry = entry?;
272 let path = entry.path();
273
274 if path.is_file()
276 && let Some(file_name) = path.file_name()
277 {
278 let file_name_str = file_name.to_string_lossy();
279 if file_name_str.ends_with(".md") && file_name_str != "README.md" {
280 let content = fs::read_to_string(&path).with_context(|| {
281 format!("could not read decision file `{}`", path.display())
282 })?;
283 decisions.push(content);
284 }
285 }
286 }
287 break; }
289 }
290
291 Ok(decisions)
292}
293
294fn count_files(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
296 let mut file_count = 0;
297
298 if let Ok(entries) = fs::read_dir(feature_path) {
299 for entry in entries.flatten() {
300 let path = entry.path();
301 let path_str = path.to_string_lossy().to_string();
302
303 if is_documentation_directory(&path) {
305 continue;
306 }
307
308 if nested_feature_paths
310 .iter()
311 .any(|nfp| path_str.starts_with(nfp))
312 {
313 continue;
314 }
315
316 if path.is_file() {
317 file_count += 1;
318 } else if path.is_dir() {
319 file_count += count_files(&path, nested_feature_paths);
321 }
322 }
323 }
324
325 file_count
326}
327
328fn count_lines(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
330 let mut line_count = 0;
331
332 if let Ok(entries) = fs::read_dir(feature_path) {
333 for entry in entries.flatten() {
334 let path = entry.path();
335 let path_str = path.to_string_lossy().to_string();
336
337 if is_documentation_directory(&path) {
339 continue;
340 }
341
342 if nested_feature_paths
344 .iter()
345 .any(|nfp| path_str.starts_with(nfp))
346 {
347 continue;
348 }
349
350 if path.is_file() {
351 if let Ok(content) = fs::read_to_string(&path) {
353 line_count += content.lines().count();
354 }
355 } else if path.is_dir() {
356 line_count += count_lines(&path, nested_feature_paths);
358 }
359 }
360 }
361
362 line_count
363}
364
365fn count_todos(feature_path: &Path, nested_feature_paths: &[String]) -> usize {
367 let mut todo_count = 0;
368
369 if let Ok(entries) = fs::read_dir(feature_path) {
370 for entry in entries.flatten() {
371 let path = entry.path();
372 let path_str = path.to_string_lossy().to_string();
373
374 if is_documentation_directory(&path) {
376 continue;
377 }
378
379 if nested_feature_paths
381 .iter()
382 .any(|nfp| path_str.starts_with(nfp))
383 {
384 continue;
385 }
386
387 if path.is_file() {
388 if let Ok(content) = fs::read_to_string(&path) {
390 for line in content.lines() {
391 let line_upper = line.to_uppercase();
393 if line_upper.contains("TODO") {
394 todo_count += 1;
395 }
396 }
397 }
398 } else if path.is_dir() {
399 todo_count += count_todos(&path, nested_feature_paths);
401 }
402 }
403 }
404
405 todo_count
406}
407
/// Lists every repository-relative path touched by the commit `commit_hash`.
///
/// For a root commit (no parents) this is every path in the commit's tree;
/// otherwise it is the union of old and new file paths from the diff against
/// the first parent. Any git2 lookup or hash-parse failure yields an empty
/// list rather than an error.
fn get_commit_affected_paths(repo: &Repository, commit_hash: &str) -> Vec<String> {
    // Invalid hashes and unknown commits are treated as "touched nothing".
    let Ok(oid) = git2::Oid::from_str(commit_hash) else {
        return Vec::new();
    };

    let Ok(commit) = repo.find_commit(oid) else {
        return Vec::new();
    };

    let mut paths = Vec::new();

    // Root commit: no parent to diff against, so every path in the tree
    // counts as affected.
    if commit.parent_count() == 0 {
        if let Ok(tree) = commit.tree() {
            collect_all_tree_paths(repo, &tree, "", &mut paths);
        }
        return paths;
    }

    let Ok(tree) = commit.tree() else {
        return Vec::new();
    };

    // NOTE(review): only the first parent is considered — for merge commits
    // this reports the diff against parent 0 only. Confirm that is intended.
    let Ok(parent) = commit.parent(0) else {
        return Vec::new();
    };

    let Ok(parent_tree) = parent.tree() else {
        return Vec::new();
    };

    if let Ok(diff) = repo.diff_tree_to_tree(Some(&parent_tree), Some(&tree), None) {
        // File-level callback only; hunk/line callbacks are not needed.
        let _ = diff.foreach(
            &mut |delta, _| {
                // Record the post-change path of each delta...
                if let Some(path) = delta.new_file().path()
                    && let Some(path_str) = path.to_str()
                {
                    paths.push(path_str.to_string());
                }
                // ...and the pre-change path as well (covers renames and
                // deletions), skipping the common case where old == new.
                if let Some(path) = delta.old_file().path()
                    && let Some(path_str) = path.to_str()
                    && !paths.contains(&path_str.to_string())
                {
                    paths.push(path_str.to_string());
                }
                true
            },
            None,
            None,
            None,
        );
    }

    paths
}
465
466fn collect_all_tree_paths(
468 repo: &Repository,
469 tree: &git2::Tree,
470 prefix: &str,
471 paths: &mut Vec<String>,
472) {
473 for entry in tree.iter() {
474 if let Some(name) = entry.name() {
475 let path = if prefix.is_empty() {
476 name.to_string()
477 } else {
478 format!("{}/{}", prefix, name)
479 };
480
481 paths.push(path.clone());
482
483 if entry.kind() == Some(git2::ObjectType::Tree)
484 && let Ok(obj) = entry.to_object(repo)
485 && let Ok(subtree) = obj.peel_to_tree()
486 {
487 collect_all_tree_paths(repo, &subtree, &path, paths);
488 }
489 }
490 }
491}
492
493fn compute_stats_from_changes(
495 changes: &[Change],
496 feature_path: &Path,
497 nested_features: &[Feature],
498) -> Option<Stats> {
499 if changes.is_empty() {
500 return None;
501 }
502
503 let nested_feature_paths: Vec<String> =
505 nested_features.iter().map(|f| f.path.clone()).collect();
506
507 let repo = Repository::discover(feature_path).ok();
509
510 let feature_relative_path = if let Some(ref r) = repo {
512 if let Ok(canonical_path) = std::fs::canonicalize(feature_path) {
513 if let Some(workdir) = r.workdir() {
514 canonical_path
515 .strip_prefix(workdir)
516 .ok()
517 .map(|p| p.to_string_lossy().to_string())
518 } else {
519 None
520 }
521 } else {
522 None
523 }
524 } else {
525 None
526 };
527
528 let filtered_changes: Vec<&Change> = changes
531 .iter()
532 .filter(|change| {
533 let Some(ref r) = repo else {
535 return true;
536 };
537
538 let Some(ref feature_rel_path) = feature_relative_path else {
539 return true;
540 };
541
542 let affected_files = get_commit_affected_paths(r, &change.hash);
544
545 affected_files.iter().any(|file_path| {
547 let in_feature = file_path.starts_with(feature_rel_path);
549
550 let in_nested = nested_feature_paths.iter().any(|nested_path| {
552 if let Ok(nested_canonical) = std::fs::canonicalize(nested_path)
554 && let Some(workdir) = r.workdir()
555 && let Ok(nested_rel) = nested_canonical.strip_prefix(workdir)
556 {
557 let nested_rel_str = nested_rel.to_string_lossy();
558 return file_path.starts_with(nested_rel_str.as_ref());
559 }
560 false
561 });
562
563 in_feature && !in_nested
564 })
565 })
566 .collect();
567
568 let mut commits = HashMap::new();
569
570 commits.insert(
572 "total_commits".to_string(),
573 serde_json::json!(filtered_changes.len()),
574 );
575
576 let mut authors_count: HashMap<String, usize> = HashMap::new();
578 for change in &filtered_changes {
579 *authors_count.entry(change.author_name.clone()).or_insert(0) += 1;
580 }
581 commits.insert(
582 "authors_count".to_string(),
583 serde_json::json!(authors_count),
584 );
585
586 let mut count_by_type: HashMap<String, usize> = HashMap::new();
588 for change in &filtered_changes {
589 let commit_type = extract_commit_type(&change.title);
590 *count_by_type.entry(commit_type).or_insert(0) += 1;
591 }
592 commits.insert(
593 "count_by_type".to_string(),
594 serde_json::json!(count_by_type),
595 );
596
597 if let Some(first) = filtered_changes.first() {
599 commits.insert(
600 "first_commit_date".to_string(),
601 serde_json::json!(first.date.clone()),
602 );
603 }
604 if let Some(last) = filtered_changes.last() {
605 commits.insert(
606 "last_commit_date".to_string(),
607 serde_json::json!(last.date.clone()),
608 );
609 }
610
611 let files_count = count_files(feature_path, &nested_feature_paths);
613 let lines_count = count_lines(feature_path, &nested_feature_paths);
614 let todos_count = count_todos(feature_path, &nested_feature_paths);
615
616 Some(Stats {
617 files_count: Some(files_count),
618 lines_count: Some(lines_count),
619 todos_count: Some(todos_count),
620 commits,
621 coverage: None,
622 })
623}
624
/// Extracts the conventional-commit type from a commit title.
///
/// Recognizes `type: subject` and `type(scope): subject` forms for the
/// standard types, case-insensitively; anything else maps to `"other"`.
fn extract_commit_type(title: &str) -> String {
    const KNOWN_TYPES: [&str; 11] = [
        "feat", "fix", "docs", "style", "refactor", "perf", "test", "build", "ci", "chore",
        "revert",
    ];

    // Take everything before the first ':', strip an optional '(scope)',
    // then normalize whitespace and case.
    let type_part = title
        .find(':')
        .map(|colon_pos| &title[..colon_pos])
        .map(|prefix| prefix.find('(').map_or(prefix, |paren| &prefix[..paren]))
        .map(|raw| raw.trim().to_lowercase());

    match type_part {
        Some(t) if KNOWN_TYPES.contains(&t.as_str()) => t,
        _ => "other".to_string(),
    }
}
655
656fn process_feature_directory(
657 path: &Path,
658 base_path: &Path,
659 name: &str,
660 changes_map: Option<&HashMap<String, Vec<Change>>>,
661 parent_owner: Option<&str>,
662 feature_metadata_map: &FeatureMetadataMap,
663) -> Result<Feature> {
664 let mut readme_info = if let Some(readme_path) = find_readme_file(path) {
666 read_readme_info(&readme_path)?
667 } else {
668 use crate::readme_parser::ReadmeInfo;
669 ReadmeInfo {
670 title: None,
671 owner: "".to_string(),
672 description: "".to_string(),
673 meta: std::collections::HashMap::new(),
674 }
675 };
676
677 readme_info.meta.remove("feature");
679
680 let folder_name = path.file_name().and_then(|n| n.to_str()).unwrap_or(name);
682
683 if let Some(metadata_map) = feature_metadata_map.get(folder_name) {
685 for (metadata_key, flags) in metadata_map {
687 let flags_json: Vec<serde_json::Value> = flags
689 .iter()
690 .map(|flag_map| {
691 let json_map: serde_json::Map<String, serde_json::Value> = flag_map
692 .iter()
693 .map(|(k, v)| (k.clone(), serde_json::Value::String(v.clone())))
694 .collect();
695 serde_json::Value::Object(json_map)
696 })
697 .collect();
698
699 readme_info
701 .meta
702 .entry(metadata_key.clone())
703 .and_modify(|existing| {
704 if let serde_json::Value::Array(arr) = existing {
705 arr.extend(flags_json.clone());
706 }
707 })
708 .or_insert_with(|| serde_json::Value::Array(flags_json));
709 }
710 }
711
712 let changes = if let Some(map) = changes_map {
713 get_changes_for_path(path, map).unwrap_or_default()
715 } else {
716 Vec::new()
717 };
718
719 let decisions = read_decision_files(path).unwrap_or_default();
721
722 let (actual_owner, is_owner_inherited) = if readme_info.owner.is_empty() {
724 if let Some(parent) = parent_owner {
725 (parent.to_string(), true)
726 } else {
727 ("".to_string(), false)
728 }
729 } else {
730 (readme_info.owner.clone(), false)
731 };
732
733 let nested_features_path = path.join("features");
735 let mut nested_features = if nested_features_path.exists() && nested_features_path.is_dir() {
736 list_files_recursive_impl(
737 &nested_features_path,
738 base_path,
739 changes_map,
740 Some(&actual_owner),
741 feature_metadata_map,
742 )
743 .unwrap_or_default()
744 } else {
745 Vec::new()
746 };
747
748 let entries = fs::read_dir(path)
750 .with_context(|| format!("could not read directory `{}`", path.display()))?;
751
752 let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
753 entries.sort_by_key(|entry| entry.path());
754
755 for entry in entries {
756 let entry_path = entry.path();
757 let entry_name = entry_path.file_name().unwrap().to_string_lossy();
758
759 if entry_path.is_dir()
760 && entry_name != "features" && !is_documentation_directory(&entry_path)
762 {
763 if has_feature_flag_in_readme(&entry_path) {
764 let nested_feature = process_feature_directory(
766 &entry_path,
767 base_path,
768 &entry_name,
769 changes_map,
770 Some(&actual_owner),
771 feature_metadata_map,
772 )?;
773 nested_features.push(nested_feature);
774 } else {
775 let deeper_features = list_files_recursive_impl(
778 &entry_path,
779 base_path,
780 changes_map,
781 Some(&actual_owner),
782 feature_metadata_map,
783 )?;
784 nested_features.extend(deeper_features);
785 }
786 }
787 }
788
789 let nested_feature_paths: Vec<String> =
791 nested_features.iter().map(|f| f.path.clone()).collect();
792
793 let files_count = count_files(path, &nested_feature_paths);
795 let lines_count = count_lines(path, &nested_feature_paths);
796 let todos_count = count_todos(path, &nested_feature_paths);
797
798 let stats =
800 if let Some(change_stats) = compute_stats_from_changes(&changes, path, &nested_features) {
801 Some(change_stats)
803 } else {
804 Some(Stats {
806 files_count: Some(files_count),
807 lines_count: Some(lines_count),
808 todos_count: Some(todos_count),
809 commits: HashMap::new(),
810 coverage: None,
811 })
812 };
813
814 let relative_path = path
816 .strip_prefix(base_path)
817 .unwrap_or(path)
818 .to_string_lossy()
819 .to_string();
820
821 Ok(Feature {
822 name: readme_info.title.unwrap_or_else(|| name.to_string()),
823 description: readme_info.description,
824 owner: actual_owner,
825 is_owner_inherited,
826 path: relative_path,
827 features: nested_features,
828 meta: readme_info.meta,
829 changes,
830 decisions,
831 stats,
832 dependencies: Vec::new(), })
834}
835
836fn list_files_recursive_impl(
837 dir: &Path,
838 base_path: &Path,
839 changes_map: Option<&HashMap<String, Vec<Change>>>,
840 parent_owner: Option<&str>,
841 feature_metadata_map: &FeatureMetadataMap,
842) -> Result<Vec<Feature>> {
843 let entries = fs::read_dir(dir)
844 .with_context(|| format!("could not read directory `{}`", dir.display()))?;
845
846 let mut entries: Vec<_> = entries.collect::<Result<_, _>>()?;
847 entries.sort_by_key(|entry| entry.path());
848
849 let mut features: Vec<Feature> = Vec::new();
850
851 for entry in entries {
852 let path = entry.path();
853 let name = path.file_name().unwrap().to_string_lossy();
854
855 if path.is_dir() {
856 if is_feature_directory(&path) {
857 let feature = process_feature_directory(
858 &path,
859 base_path,
860 &name,
861 changes_map,
862 parent_owner,
863 feature_metadata_map,
864 )?;
865 features.push(feature);
866 } else if !is_documentation_directory(&path)
867 && !is_inside_documentation_directory(&path)
868 {
869 let new_features = list_files_recursive_impl(
871 &path,
872 base_path,
873 changes_map,
874 parent_owner,
875 feature_metadata_map,
876 )?;
877 features.extend(new_features);
878 }
879 }
880 }
881
882 Ok(features)
883}
884
885fn get_changes_for_path(
887 path: &Path,
888 changes_map: &HashMap<String, Vec<Change>>,
889) -> Result<Vec<Change>> {
890 let canonical_path = std::fs::canonicalize(path)?;
892
893 let repo = Repository::discover(path)?;
895 let repo_workdir = repo
896 .workdir()
897 .context("repository has no working directory")?;
898
899 let relative_path = canonical_path
901 .strip_prefix(repo_workdir)
902 .context("path is not within repository")?;
903
904 let relative_path_str = relative_path.to_string_lossy().to_string();
905
906 Ok(changes_map
908 .get(&relative_path_str)
909 .cloned()
910 .unwrap_or_default())
911}
912
#[cfg(test)]
mod tests {
    use super::*;

    /// Covers the conventional-commit parsing in `extract_commit_type`:
    /// every known type, scoped forms, case-insensitivity, non-standard
    /// titles, and whitespace/paren edge cases.
    #[test]
    fn test_extract_commit_type() {
        // All eleven known types in plain `type: subject` form.
        assert_eq!(extract_commit_type("feat: add new feature"), "feat");
        assert_eq!(extract_commit_type("fix: resolve bug"), "fix");
        assert_eq!(extract_commit_type("docs: update README"), "docs");
        assert_eq!(extract_commit_type("style: format code"), "style");
        assert_eq!(
            extract_commit_type("refactor: improve structure"),
            "refactor"
        );
        assert_eq!(extract_commit_type("perf: optimize performance"), "perf");
        assert_eq!(extract_commit_type("test: add unit tests"), "test");
        assert_eq!(extract_commit_type("build: update dependencies"), "build");
        assert_eq!(extract_commit_type("ci: fix CI pipeline"), "ci");
        assert_eq!(extract_commit_type("chore: update gitignore"), "chore");
        assert_eq!(
            extract_commit_type("revert: undo previous commit"),
            "revert"
        );

        // Scoped form `type(scope): subject` — scope is stripped.
        assert_eq!(extract_commit_type("feat(auth): add login"), "feat");
        assert_eq!(
            extract_commit_type("fix(api): resolve endpoint issue"),
            "fix"
        );
        assert_eq!(
            extract_commit_type("docs(readme): update instructions"),
            "docs"
        );

        // Matching is case-insensitive; result is lowercased.
        assert_eq!(extract_commit_type("FEAT: uppercase type"), "feat");
        assert_eq!(extract_commit_type("Fix: mixed case"), "fix");
        assert_eq!(extract_commit_type("DOCS: all caps"), "docs");

        // Unknown types, missing colon, and empty input fall back to "other".
        assert_eq!(extract_commit_type("random commit message"), "other");
        assert_eq!(extract_commit_type("update: not conventional"), "other");
        assert_eq!(
            extract_commit_type("feature: close but not standard"),
            "other"
        );
        assert_eq!(extract_commit_type("no colon here"), "other");
        assert_eq!(extract_commit_type(""), "other");

        // Whitespace and paren edge cases around the type token.
        assert_eq!(extract_commit_type("feat:no space after colon"), "feat");
        assert_eq!(extract_commit_type("feat : extra spaces"), "feat");
        assert_eq!(
            extract_commit_type("feat(scope)(weird): nested parens"),
            "feat"
        );
    }
}