1use crate::adapters::PackageAdapter;
2use crate::errors::{Result, SampoError, io_error_with_path};
3use crate::filters::should_ignore_package;
4use crate::manifest::{ManifestMetadata, update_manifest_versions};
5use crate::types::{
6 Bump, DependencyUpdate, PackageInfo, PackageKind, PackageSpecifier, ReleaseOutput,
7 ReleasedPackage, SpecResolution, Workspace, format_ambiguity_options,
8};
9use crate::{
10 changeset::ChangesetInfo, config::Config, current_branch, detect_github_repo_slug_with_config,
11 discover_workspace, enrich_changeset_message, get_commit_hash_for_path, load_changesets,
12};
13use chrono::{DateTime, FixedOffset, Local, Utc};
14use chrono_tz::Tz;
15use semver::{BuildMetadata, Prerelease, Version};
16use std::collections::{BTreeMap, BTreeSet};
17use std::ffi::OsStr;
18use std::fs;
19use std::path::{Path, PathBuf};
20
/// Build the human-readable "Updated dependencies: …" changelog line.
///
/// Each entry is rendered as `name@version`. When two updates share the same
/// base name but carry different kind labels/prefixes (e.g. `cargo/foo` vs
/// `npm/foo`), both are printed with their label so the line stays
/// unambiguous. Returns `None` when `updates` is empty.
pub fn format_dependency_updates_message(updates: &[DependencyUpdate]) -> Option<String> {
    if updates.is_empty() {
        return None;
    }

    // For each update: (parsed specifier, raw `prefix/` fallback, base name,
    // original record). Kept in input order for the final rendering pass.
    let mut parsed_updates: Vec<(
        Option<PackageSpecifier>,
        Option<String>,
        String,
        &DependencyUpdate,
    )> = Vec::with_capacity(updates.len());
    // Every kind label observed per base name; more than one distinct label
    // means the bare base name would be ambiguous.
    let mut labels_by_name: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();

    for dep in updates {
        if let Ok(spec) = PackageSpecifier::parse(&dep.name) {
            let base_name = spec.name.clone();
            if let Some(kind) = spec.kind {
                labels_by_name
                    .entry(base_name.clone())
                    .or_default()
                    .insert(kind.as_str().to_string());
            } else {
                // Register the name even without a label so collisions with
                // labeled entries are still detected.
                labels_by_name.entry(base_name.clone()).or_default();
            }
            parsed_updates.push((Some(spec), None, base_name, dep));
        } else if let Some((prefix, name)) = dep.name.split_once('/') {
            // Unparseable specifier that still looks like `prefix/name`:
            // treat the prefix as an ad-hoc, lowercased label.
            let base_name = name.to_string();
            labels_by_name
                .entry(base_name.clone())
                .or_default()
                .insert(prefix.to_ascii_lowercase());
            parsed_updates.push((None, Some(prefix.to_string()), base_name, dep));
        } else {
            // Plain name with no recognizable structure.
            let base_name = dep.name.clone();
            labels_by_name.entry(base_name.clone()).or_default();
            parsed_updates.push((None, None, base_name, dep));
        }
    }

    // Names that appeared with more than one distinct label.
    let ambiguous_names: BTreeSet<String> = labels_by_name
        .iter()
        .filter_map(|(name, labels)| {
            if labels.len() > 1 {
                Some(name.clone())
            } else {
                None
            }
        })
        .collect();

    let dep_list = parsed_updates
        .into_iter()
        .map(|(spec_opt, raw_prefix, base_name, dep)| {
            let is_ambiguous = ambiguous_names.contains(&base_name);
            // Only ambiguous names carry their label in the output; unique
            // names are rendered bare.
            let display_label = if let Some(spec) = spec_opt.as_ref() {
                if let Some(kind) = spec.kind {
                    if is_ambiguous {
                        format!("{}/{}", kind.as_str(), spec.name)
                    } else {
                        spec.display_name(false)
                    }
                } else {
                    spec.display_name(false)
                }
            } else if let Some(prefix) = raw_prefix.as_ref() {
                if is_ambiguous {
                    format!("{}/{}", prefix.to_ascii_lowercase(), base_name)
                } else {
                    base_name.clone()
                }
            } else {
                base_name.clone()
            };
            format!("{display_label}@{}", dep.new_version)
        })
        .collect::<Vec<_>>()
        .join(", ");

    Some(format!("Updated dependencies: {}", dep_list))
}
105
106pub fn build_dependency_updates(updates: &[(String, String)]) -> Vec<DependencyUpdate> {
108 updates
109 .iter()
110 .map(|(name, version)| DependencyUpdate {
111 name: name.clone(),
112 new_version: version.clone(),
113 })
114 .collect()
115}
116
/// Resolve a changeset package specifier to the matching workspace member.
///
/// Produces a `SampoError::Changeset` when the specifier matches no package,
/// or matches several (listing the disambiguated alternatives).
fn resolve_package_spec<'a>(
    workspace: &'a Workspace,
    spec: &PackageSpecifier,
) -> Result<&'a PackageInfo> {
    match workspace.resolve_specifier(spec) {
        SpecResolution::Match(info) => Ok(info),
        // Tailor the message to whether the query carried a full identifier
        // (kind-qualified) or only a base name.
        SpecResolution::NotFound { query } => match query.identifier() {
            Some(identifier) => Err(SampoError::Changeset(format!(
                "Changeset references '{}', but it was not found in the workspace.",
                identifier
            ))),
            None => Err(SampoError::Changeset(format!(
                "Changeset references '{}', but no matching package exists in the workspace.",
                query.base_name()
            ))),
        },
        SpecResolution::Ambiguous { query, matches } => {
            let options = format_ambiguity_options(&matches);
            Err(SampoError::Changeset(format!(
                "Changeset references '{}', which matches multiple packages. \
                 Disambiguate using one of: {}.",
                query.base_name(),
                options
            )))
        }
    }
}
144
145fn resolve_config_value(workspace: &Workspace, value: &str, context: &str) -> Result<String> {
146 let spec = PackageSpecifier::parse(value).map_err(|reason| {
147 SampoError::Config(format!(
148 "{}: invalid package reference '{}': {}",
149 context, value, reason
150 ))
151 })?;
152
153 match workspace.resolve_specifier(&spec) {
154 SpecResolution::Match(info) => Ok(info.canonical_identifier().to_string()),
155 SpecResolution::NotFound { query } => Err(SampoError::Config(format!(
156 "{}: package '{}' not found in the workspace.",
157 context,
158 query.display()
159 ))),
160 SpecResolution::Ambiguous { query, matches } => {
161 let options = format_ambiguity_options(&matches);
162 Err(SampoError::Config(format!(
163 "{}: package '{}' is ambiguous. Use one of: {}.",
164 context,
165 query.base_name(),
166 options
167 )))
168 }
169 }
170}
171
172fn resolve_config_groups(
173 workspace: &Workspace,
174 groups: &[Vec<String>],
175 section: &str,
176) -> Result<Vec<Vec<String>>> {
177 let mut resolved = Vec::with_capacity(groups.len());
178 for (idx, group) in groups.iter().enumerate() {
179 let mut resolved_group = Vec::with_capacity(group.len());
180 let context = format!("{} group {}", section, idx + 1);
181 for value in group {
182 let identifier = resolve_config_value(workspace, value, &context)?;
183 resolved_group.push(identifier);
184 }
185 resolved.push(resolved_group);
186 }
187 Ok(resolved)
188}
189
190pub fn create_dependency_update_entry(updates: &[DependencyUpdate]) -> Option<(String, Bump)> {
194 format_dependency_updates_message(updates).map(|msg| (msg, Bump::Patch))
195}
196
197pub fn create_fixed_dependency_policy_entry(bump: Bump) -> (String, Bump) {
201 (
202 "Bumped due to fixed dependency group policy".to_string(),
203 bump,
204 )
205}
206
207pub fn infer_bump_from_versions(old_ver: &str, new_ver: &str) -> Bump {
212 let old_parts: Vec<u32> = old_ver.split('.').filter_map(|s| s.parse().ok()).collect();
213 let new_parts: Vec<u32> = new_ver.split('.').filter_map(|s| s.parse().ok()).collect();
214
215 if old_parts.len() >= 3 && new_parts.len() >= 3 {
216 if new_parts[0] > old_parts[0] {
217 Bump::Major
218 } else if new_parts[1] > old_parts[1] {
219 Bump::Minor
220 } else {
221 Bump::Patch
222 }
223 } else {
224 Bump::Patch
225 }
226}
227
/// Collect the extra changelog entries that explain *why* a package is being
/// released even though no changeset targeted it directly.
///
/// Two kinds of explanations are produced, keyed by canonical identifier:
/// - "Bumped due to fixed dependency group policy" for packages released
///   solely because a fixed-group sibling changed, and
/// - "Updated dependencies: …" for packages whose internal dependencies
///   received new versions in this release.
///
/// `releases` maps canonical identifier to `(old_version, new_version)`.
pub fn detect_all_dependency_explanations(
    changesets: &[ChangesetInfo],
    workspace: &Workspace,
    config: &Config,
    releases: &BTreeMap<String, (String, String)>,
) -> Result<BTreeMap<String, Vec<(String, Bump)>>> {
    let mut messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>> = BTreeMap::new();
    let include_kind = workspace.has_multiple_package_kinds();

    let bumped_packages: BTreeSet<String> = releases.keys().cloned().collect();
    let policy_packages =
        detect_fixed_dependency_policy_packages(changesets, workspace, config, &bumped_packages)?;

    for (pkg_name, policy_bump) in policy_packages {
        // Prefer the bump actually applied (derived from the recorded version
        // change) over the level the policy computed.
        let actual_bump = if let Some((old_ver, new_ver)) = releases.get(&pkg_name) {
            infer_bump_from_versions(old_ver, new_ver)
        } else {
            policy_bump
        };

        let (msg, bump_type) = create_fixed_dependency_policy_entry(actual_bump);
        messages_by_pkg
            .entry(pkg_name)
            .or_default()
            .push((msg, bump_type));
    }

    // New version of every released package, keyed by identifier, for quick
    // lookups while scanning internal dependencies.
    let new_version_by_name: BTreeMap<String, String> = releases
        .iter()
        .map(|(name, (_old, new_ver))| (name.clone(), new_ver.clone()))
        .collect();

    // Non-ignored workspace members indexed by canonical identifier.
    let by_id: BTreeMap<String, &PackageInfo> = workspace
        .members
        .iter()
        .filter(|c| !should_ignore_package(config, workspace, c).unwrap_or(false))
        .map(|c| (c.canonical_identifier().to_string(), c))
        .collect();

    for crate_id in releases.keys() {
        if let Some(crate_info) = by_id.get(crate_id) {
            // Internal dependencies of this crate that receive a new version
            // in the same release.
            let mut updated_deps = Vec::new();
            for dep_name in &crate_info.internal_deps {
                if let Some(new_version) = new_version_by_name.get(dep_name as &str) {
                    // Best display name: workspace entry, then a parsed
                    // specifier, then the raw identifier.
                    let display_dep = by_id
                        .get(dep_name)
                        .map(|info| info.display_name(include_kind))
                        .or_else(|| {
                            PackageSpecifier::parse(dep_name)
                                .ok()
                                .map(|spec| spec.display_name(include_kind))
                        })
                        .unwrap_or_else(|| dep_name.clone());
                    updated_deps.push((display_dep, new_version.clone()));
                }
            }

            if !updated_deps.is_empty() {
                let updates = build_dependency_updates(&updated_deps);
                if let Some((msg, bump)) = create_dependency_update_entry(&updates) {
                    messages_by_pkg
                        .entry(crate_id.clone())
                        .or_default()
                        .push((msg, bump));
                }
            }
        }
    }

    Ok(messages_by_pkg)
}
325
/// Find packages that are being bumped purely because of the fixed dependency
/// group policy, together with the bump level each should receive.
///
/// A package qualifies when it (a) appears in `bumped_packages`, (b) has no
/// direct changeset, (c) is not reached by the normal dependency cascade from
/// any changeset package, and (d) shares a fixed group with a package that
/// *was* affected. The resulting bump is the highest changeset bump found
/// among its directly-changed group members (defaulting to patch).
pub fn detect_fixed_dependency_policy_packages(
    changesets: &[ChangesetInfo],
    workspace: &Workspace,
    config: &Config,
    bumped_packages: &BTreeSet<String>,
) -> Result<BTreeMap<String, Bump>> {
    // Canonical identifiers of every package directly named by a changeset.
    let mut packages_with_changesets: BTreeSet<String> = BTreeSet::new();
    for cs in changesets {
        for (spec, _) in &cs.entries {
            let info = resolve_package_spec(workspace, spec)?;
            packages_with_changesets.insert(info.canonical_identifier().to_string());
        }
    }

    let resolved_groups =
        resolve_config_groups(workspace, &config.fixed_dependencies, "packages.fixed")?;

    // Reverse dependency edges: dependency identifier -> dependents.
    let mut dependents: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    for crate_info in &workspace.members {
        // Ignore-check errors are treated as "not ignored" (best effort).
        if should_ignore_package(config, workspace, crate_info).unwrap_or(false) {
            continue;
        }

        for dep_name in &crate_info.internal_deps {
            dependents
                .entry(dep_name.clone())
                .or_default()
                .insert(crate_info.canonical_identifier().to_string());
        }
    }

    // Transitive closure of dependents of changeset packages, i.e. everything
    // the ordinary dependency cascade already explains.
    let mut packages_affected_by_cascade = BTreeSet::new();
    for pkg_with_changeset in &packages_with_changesets {
        let mut queue = vec![pkg_with_changeset.clone()];
        let mut visited = BTreeSet::new();

        while let Some(pkg) = queue.pop() {
            if visited.contains(&pkg) {
                continue;
            }
            visited.insert(pkg.clone());

            if let Some(deps) = dependents.get(&pkg) {
                for dep in deps {
                    packages_affected_by_cascade.insert(dep.clone());
                    queue.push(dep.clone());
                }
            }
        }
    }

    let mut result = BTreeMap::new();

    for pkg_name in bumped_packages {
        // Directly-changed packages need no policy explanation.
        if packages_with_changesets.contains(pkg_name) {
            continue;
        }

        // Cascade-reachable packages are explained by dependency updates.
        if packages_affected_by_cascade.contains(pkg_name) {
            continue;
        }

        for group in &resolved_groups {
            if !group.contains(pkg_name) {
                continue;
            }

            // The group must contain at least one *other* affected member;
            // otherwise this bump did not come from the policy.
            let has_affected_group_member = group.iter().any(|member_id| {
                member_id != pkg_name
                    && (packages_with_changesets.contains(member_id)
                        || packages_affected_by_cascade.contains(member_id))
            });

            if !has_affected_group_member {
                continue;
            }

            // Highest changeset bump across the group's directly-changed
            // members; patch when none is found.
            let group_bump = group
                .iter()
                .filter_map(|member_id| {
                    if !packages_with_changesets.contains(member_id) {
                        return None;
                    }
                    changesets
                        .iter()
                        .filter_map(|cs| {
                            cs.entries.iter().find_map(|(spec, bump)| {
                                let info = resolve_package_spec(workspace, spec).ok()?;
                                if info.canonical_identifier() == member_id.as_str() {
                                    Some(*bump)
                                } else {
                                    None
                                }
                            })
                        })
                        .max()
                })
                .max()
                .unwrap_or(Bump::Patch);

            result.insert(pkg_name.clone(), group_bump);
            break;
        }
    }

    Ok(result)
}
447
/// Result of `compute_initial_bumps`: the per-package bump level, the
/// changelog messages collected per package, and the set of changeset files
/// consumed to produce them.
type InitialBumpsResult = (
    BTreeMap<String, Bump>,
    BTreeMap<String, Vec<(String, Bump)>>,
    BTreeSet<std::path::PathBuf>,
);

/// Planned releases as `(canonical identifier, old version, new version)`.
type ReleasePlan = Vec<(String, String, String)>;

/// Everything needed to apply a computed release plan.
struct PlanState {
    /// Changelog messages (with their bump level) keyed by package identifier.
    messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>>,
    /// Changeset files consumed while building the plan.
    used_paths: BTreeSet<PathBuf>,
    /// The planned version changes.
    releases: ReleasePlan,
    /// Release descriptions surfaced to callers of `run_release`.
    released_packages: Vec<ReleasedPackage>,
}

/// Outcome of `compute_plan_state`.
enum PlanOutcome {
    /// Changesets exist but every referenced package is skipped/ignored.
    NoApplicablePackages,
    /// Bumps were computed but none maps to a releasable workspace crate.
    NoMatchingCrates,
    /// A usable plan was produced.
    Plan(PlanState),
}
472
/// Execute (or, with `dry_run`, preview) a release for the workspace at `root`.
///
/// Loads changesets — including any stashed pre-release changesets under
/// `.sampo/prerelease` — computes the release plan, and, unless `dry_run`,
/// rewrites manifests and changelogs, disposes of the consumed changesets,
/// and refreshes `Cargo.lock` when present.
///
/// Errors when the current branch is not configured as a release branch or
/// when plan computation fails; "nothing to release" cases print a notice and
/// return an empty `ReleaseOutput` instead of erroring.
pub fn run_release(root: &std::path::Path, dry_run: bool) -> Result<ReleaseOutput> {
    let workspace = discover_workspace(root)?;
    let config = Config::load(&workspace.root)?;

    // Refuse to release from branches not whitelisted in the configuration.
    let branch = current_branch()?;
    if !config.is_release_branch(&branch) {
        return Err(SampoError::Release(format!(
            "Branch '{}' is not configured for releases (allowed: {:?})",
            branch,
            config.release_branches().into_iter().collect::<Vec<_>>()
        )));
    }

    validate_fixed_dependencies(&config, &workspace)?;

    let changesets_dir = workspace.root.join(".sampo").join("changesets");
    let prerelease_dir = workspace.root.join(".sampo").join("prerelease");

    let current_changesets = load_changesets(&changesets_dir)?;
    let preserved_changesets = load_changesets(&prerelease_dir)?;

    // `using_preserved`: stashed pre-release changesets must be folded back
    // in. `cached_plan_state`: avoids recomputing a plan we already have.
    let mut using_preserved = false;
    let mut cached_plan_state: Option<PlanState> = None;

    if current_changesets.is_empty() {
        if preserved_changesets.is_empty() {
            println!(
                "No changesets found in {}",
                workspace.root.join(".sampo").join("changesets").display()
            );
            return Ok(ReleaseOutput {
                released_packages: vec![],
                dry_run,
            });
        }
        using_preserved = true;
    } else {
        match compute_plan_state(&current_changesets, &workspace, &config)? {
            PlanOutcome::Plan(plan) => {
                // Leaving a pre-release (plan has no pre-release versions but
                // a stash exists): the stashed changesets re-enter the plan.
                let is_prerelease_preview = releases_include_prerelease(&plan.releases);
                if !is_prerelease_preview && !preserved_changesets.is_empty() {
                    using_preserved = true;
                } else {
                    cached_plan_state = Some(plan);
                }
            }
            PlanOutcome::NoApplicablePackages => {
                if preserved_changesets.is_empty() {
                    println!("No applicable packages found in changesets.");
                    return Ok(ReleaseOutput {
                        released_packages: vec![],
                        dry_run,
                    });
                }
                using_preserved = true;
            }
            PlanOutcome::NoMatchingCrates => {
                if preserved_changesets.is_empty() {
                    println!("No matching workspace crates to release.");
                    return Ok(ReleaseOutput {
                        released_packages: vec![],
                        dry_run,
                    });
                }
                using_preserved = true;
            }
        }
    }

    let mut final_changesets;
    let plan_state = if using_preserved {
        if dry_run {
            // Preview only: merge in memory without touching the stash.
            final_changesets = current_changesets;
            final_changesets.extend(preserved_changesets);
        } else {
            restore_prerelease_changesets(&prerelease_dir, &changesets_dir)?;
            final_changesets = load_changesets(&changesets_dir)?;
        }

        match compute_plan_state(&final_changesets, &workspace, &config)? {
            PlanOutcome::Plan(plan) => plan,
            PlanOutcome::NoApplicablePackages => {
                println!("No applicable packages found in changesets.");
                return Ok(ReleaseOutput {
                    released_packages: vec![],
                    dry_run,
                });
            }
            PlanOutcome::NoMatchingCrates => {
                println!("No matching workspace crates to release.");
                return Ok(ReleaseOutput {
                    released_packages: vec![],
                    dry_run,
                });
            }
        }
    } else {
        final_changesets = current_changesets;
        match cached_plan_state {
            Some(plan) => plan,
            None => match compute_plan_state(&final_changesets, &workspace, &config)? {
                PlanOutcome::Plan(plan) => plan,
                PlanOutcome::NoApplicablePackages => {
                    println!("No applicable packages found in changesets.");
                    return Ok(ReleaseOutput {
                        released_packages: vec![],
                        dry_run,
                    });
                }
                PlanOutcome::NoMatchingCrates => {
                    println!("No matching workspace crates to release.");
                    return Ok(ReleaseOutput {
                        released_packages: vec![],
                        dry_run,
                    });
                }
            },
        }
    };

    let PlanState {
        mut messages_by_pkg,
        used_paths,
        releases,
        released_packages,
    } = plan_state;

    print_release_plan(&workspace, &releases);

    let is_prerelease_release = releases_include_prerelease(&releases);

    if dry_run {
        println!("Dry-run: no files modified, no tags created.");
        return Ok(ReleaseOutput {
            released_packages,
            dry_run: true,
        });
    }

    apply_releases(
        &releases,
        &workspace,
        &mut messages_by_pkg,
        &final_changesets,
        &config,
    )?;

    // Pre-release runs stash the consumed changesets instead of deleting them.
    finalize_consumed_changesets(used_paths, &workspace.root, is_prerelease_release)?;

    // Best-effort: version changes invalidate Cargo.lock, but a failed
    // regeneration should not fail the release.
    if workspace.root.join("Cargo.lock").exists()
        && let Err(e) = regenerate_lockfile(&workspace.root)
    {
        eprintln!("Warning: failed to regenerate Cargo.lock, {}", e);
    }

    Ok(ReleaseOutput {
        released_packages,
        dry_run: false,
    })
}
641
/// Compute the full release plan for the given changesets.
///
/// Pipeline: initial bumps from changesets -> dependency cascade -> fixed and
/// linked group alignment -> concrete version plan. Returns the matching
/// `PlanOutcome` variant when there is nothing to release.
fn compute_plan_state(
    changesets: &[ChangesetInfo],
    workspace: &Workspace,
    config: &Config,
) -> Result<PlanOutcome> {
    let (mut bump_by_pkg, messages_by_pkg, used_paths) =
        compute_initial_bumps(changesets, workspace, config)?;

    if bump_by_pkg.is_empty() {
        return Ok(PlanOutcome::NoApplicablePackages);
    }

    let dependents = build_dependency_graph(workspace, config);
    apply_dependency_cascade(&mut bump_by_pkg, &dependents, config, workspace)?;
    apply_linked_dependencies(&mut bump_by_pkg, config, workspace)?;

    let releases = prepare_release_plan(&bump_by_pkg, workspace)?;
    if releases.is_empty() {
        return Ok(PlanOutcome::NoMatchingCrates);
    }

    // Describe each release for the caller, preferring the workspace's
    // display name over the canonical identifier.
    let released_packages: Vec<ReleasedPackage> = releases
        .iter()
        .map(|(name, old_version, new_version)| {
            let bump = bump_by_pkg.get(name).copied().unwrap_or(Bump::Patch);
            let display_name = workspace
                .find_by_identifier(name)
                .map(|info| info.name.clone())
                .unwrap_or_else(|| name.clone());
            ReleasedPackage {
                name: display_name,
                identifier: name.clone(),
                old_version: old_version.clone(),
                new_version: new_version.clone(),
                bump,
            }
        })
        .collect();

    Ok(PlanOutcome::Plan(PlanState {
        messages_by_pkg,
        used_paths,
        releases,
        released_packages,
    }))
}
688
689fn releases_include_prerelease(releases: &ReleasePlan) -> bool {
690 releases.iter().any(|(_, _, new_version)| {
691 Version::parse(new_version)
692 .map(|v| !v.pre.is_empty())
693 .unwrap_or(false)
694 })
695}
696
697pub(crate) fn restore_prerelease_changesets(
698 prerelease_dir: &Path,
699 changesets_dir: &Path,
700) -> Result<()> {
701 if !prerelease_dir.exists() {
702 return Ok(());
703 }
704
705 for entry in fs::read_dir(prerelease_dir)? {
706 let entry = entry?;
707 let path = entry.path();
708 if !path.is_file() {
709 continue;
710 }
711 if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
712 continue;
713 }
714
715 let _ = move_changeset_file(&path, changesets_dir)?;
717 }
718
719 Ok(())
720}
721
722fn finalize_consumed_changesets(
723 used_paths: BTreeSet<PathBuf>,
724 workspace_root: &Path,
725 preserve_for_prerelease: bool,
726) -> Result<()> {
727 if used_paths.is_empty() {
728 return Ok(());
729 }
730
731 if preserve_for_prerelease {
732 let prerelease_dir = workspace_root.join(".sampo").join("prerelease");
733 for path in used_paths {
734 if !path.exists() {
735 continue;
736 }
737 let _ = move_changeset_file(&path, &prerelease_dir)?;
738 }
739 println!("Preserved consumed changesets for pre-release.");
740 } else {
741 for path in used_paths {
742 if !path.exists() {
743 continue;
744 }
745 fs::remove_file(&path).map_err(|err| SampoError::Io(io_error_with_path(err, &path)))?;
746 }
747 println!("Removed consumed changesets.");
748 }
749
750 Ok(())
751}
752
753pub(crate) fn move_changeset_file(source: &Path, dest_dir: &Path) -> Result<PathBuf> {
754 if !source.exists() {
755 return Ok(source.to_path_buf());
756 }
757
758 fs::create_dir_all(dest_dir)?;
759 let file_name = source
760 .file_name()
761 .ok_or_else(|| SampoError::Changeset("Invalid changeset file name".to_string()))?;
762
763 let mut destination = dest_dir.join(file_name);
764 if destination == source {
765 return Ok(destination);
766 }
767
768 if destination.exists() {
769 destination = unique_destination_path(dest_dir, file_name);
770 }
771
772 fs::rename(source, &destination)?;
773 Ok(destination)
774}
775
/// Produce a path inside `dir` based on `file_name` that does not collide
/// with an existing file, by appending `-1`, `-2`, … to the stem (before the
/// extension, when there is one).
fn unique_destination_path(dir: &Path, file_name: &OsStr) -> PathBuf {
    let name_path = Path::new(file_name);
    // Fall back to the whole name when there is no recognizable stem.
    let stem = name_path
        .file_stem()
        .unwrap_or(file_name)
        .to_string_lossy()
        .into_owned();
    let extension = name_path
        .extension()
        .map(|ext| ext.to_string_lossy().into_owned());

    (1u64..)
        .map(|counter| {
            let candidate_name = match &extension {
                Some(ext) => format!("{}-{}.{}", stem, counter, ext),
                None => format!("{}-{}", stem, counter),
            };
            dir.join(candidate_name)
        })
        .find(|candidate| !candidate.exists())
        .expect("unbounded counter always yields a free path")
}
800
/// Regenerate `Cargo.lock` after manifest versions changed, delegating to
/// the Cargo package adapter.
pub(crate) fn regenerate_lockfile(root: &Path) -> Result<()> {
    PackageAdapter::Cargo.regenerate_lockfile(root)
}
808
/// Derive each package's initial bump and changelog messages directly from
/// the changesets, before any dependency cascade is applied.
///
/// Returns `(bump per package, messages per package, consumed changeset
/// paths)`. Entries targeting ignored packages are skipped; a changeset file
/// counts as consumed only if at least one of its entries applied.
fn compute_initial_bumps(
    changesets: &[ChangesetInfo],
    ws: &Workspace,
    cfg: &Config,
) -> Result<InitialBumpsResult> {
    let mut bump_by_pkg: BTreeMap<String, Bump> = BTreeMap::new();
    let mut messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>> = BTreeMap::new();
    let mut used_paths: BTreeSet<std::path::PathBuf> = BTreeSet::new();

    // Used to enrich messages with commit metadata; the token is optional and
    // read from GITHUB_TOKEN, then GH_TOKEN.
    let repo_slug = detect_github_repo_slug_with_config(&ws.root, cfg.github_repository.as_deref());
    let github_token = std::env::var("GITHUB_TOKEN")
        .ok()
        .or_else(|| std::env::var("GH_TOKEN").ok());

    for cs in changesets {
        let mut consumed_changeset = false;
        for (spec, bump) in &cs.entries {
            let info = resolve_package_spec(ws, spec)?;
            if should_ignore_package(cfg, ws, info)? {
                continue;
            }

            consumed_changeset = true;

            let identifier = info.canonical_identifier().to_string();

            // Keep the highest bump requested for this package across all
            // changesets.
            bump_by_pkg
                .entry(identifier.clone())
                .and_modify(|b| {
                    if *bump > *b {
                        *b = *bump;
                    }
                })
                .or_insert(*bump);

            // Enrich the message when the changeset's commit can be found;
            // otherwise use it verbatim.
            let commit_hash = get_commit_hash_for_path(&ws.root, &cs.path);
            let enriched = if let Some(hash) = commit_hash {
                enrich_changeset_message(
                    &cs.message,
                    &hash,
                    &ws.root,
                    repo_slug.as_deref(),
                    github_token.as_deref(),
                    cfg.changelog_show_commit_hash,
                    cfg.changelog_show_acknowledgments,
                )
            } else {
                cs.message.clone()
            };

            messages_by_pkg
                .entry(identifier)
                .or_default()
                .push((enriched, *bump));
        }
        if consumed_changeset {
            used_paths.insert(cs.path.clone());
        }
    }

    Ok((bump_by_pkg, messages_by_pkg, used_paths))
}
875
876fn build_dependency_graph(ws: &Workspace, cfg: &Config) -> BTreeMap<String, BTreeSet<String>> {
879 let mut dependents: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
880
881 let ignored_packages: BTreeSet<String> = ws
883 .members
884 .iter()
885 .filter(|c| should_ignore_package(cfg, ws, c).unwrap_or(false))
886 .map(|c| c.canonical_identifier().to_string())
887 .collect();
888
889 for c in &ws.members {
890 if c.kind != PackageKind::Cargo {
892 continue;
893 }
894
895 let identifier = c.canonical_identifier();
897 if ignored_packages.contains(identifier) {
898 continue;
899 }
900
901 for dep in &c.internal_deps {
902 if ignored_packages.contains(dep) {
904 continue;
905 }
906
907 dependents
908 .entry(dep.clone())
909 .or_default()
910 .insert(identifier.to_string());
911 }
912 }
913 dependents
914}
915
/// Propagate bumps to dependents and across fixed dependency groups.
///
/// Every (transitive) dependent of a bumped package receives at least a
/// patch bump; dependents that belong to a fixed group, and fixed-group
/// siblings of a bumped package, inherit the full bump level. Packages whose
/// ignore check errors are still cascaded (best effort).
///
/// NOTE(review): each package is queued at most once (`seen`); if its bump is
/// escalated *after* it has been processed, the higher level is not
/// re-propagated to its dependents — confirm this is the intended semantics.
fn apply_dependency_cascade(
    bump_by_pkg: &mut BTreeMap<String, Bump>,
    dependents: &BTreeMap<String, BTreeSet<String>>,
    cfg: &Config,
    ws: &Workspace,
) -> Result<()> {
    let resolved_fixed_groups =
        resolve_config_groups(ws, &cfg.fixed_dependencies, "packages.fixed")?;

    // Index of the fixed group containing `pkg_id`, if any.
    let find_fixed_group = |pkg_id: &str| -> Option<usize> {
        resolved_fixed_groups
            .iter()
            .position(|group| group.contains(&pkg_id.to_string()))
    };

    // Cargo members indexed by canonical identifier, for ignore checks.
    let mut by_id: BTreeMap<String, &PackageInfo> = BTreeMap::new();
    for c in &ws.members {
        if c.kind == PackageKind::Cargo {
            by_id.insert(c.canonical_identifier().to_string(), c);
        }
    }

    // Worklist of packages whose bump may still need propagating.
    let mut queue: Vec<String> = bump_by_pkg.keys().cloned().collect();
    let mut seen: BTreeSet<String> = queue.iter().cloned().collect();

    while let Some(changed) = queue.pop() {
        let changed_bump = bump_by_pkg.get(&changed).copied().unwrap_or(Bump::Patch);

        // 1) Direct dependents: at least Patch; full level when the
        //    dependent belongs to a fixed group.
        if let Some(deps) = dependents.get(&changed) {
            for dep_name in deps {
                if let Some(info) = by_id.get(dep_name) {
                    match should_ignore_package(cfg, ws, info) {
                        Ok(true) => continue,
                        Ok(false) => {}
                        // Best effort: on error, keep cascading.
                        Err(_) => {}
                    }
                }

                let dependent_bump = if find_fixed_group(dep_name).is_some() {
                    changed_bump
                } else {
                    Bump::Patch
                };

                // Insert or raise, never lower.
                let entry = bump_by_pkg
                    .entry(dep_name.clone())
                    .or_insert(dependent_bump);
                if *entry < dependent_bump {
                    *entry = dependent_bump;
                }
                if !seen.contains(dep_name) {
                    queue.push(dep_name.clone());
                    seen.insert(dep_name.clone());
                }
            }
        }

        // 2) Fixed-group siblings inherit this package's bump level.
        if let Some(group_idx) = find_fixed_group(&changed) {
            for group_member in &resolved_fixed_groups[group_idx] {
                if group_member == &changed {
                    continue;
                }

                if let Some(info) = by_id.get(group_member) {
                    match should_ignore_package(cfg, ws, info) {
                        Ok(true) => continue,
                        Ok(false) => {}
                        // Best effort: on error, keep cascading.
                        Err(_) => {}
                    }
                }

                // Insert or raise, never lower.
                let entry = bump_by_pkg
                    .entry(group_member.clone())
                    .or_insert(changed_bump);
                if *entry < changed_bump {
                    *entry = changed_bump;
                }
                if !seen.contains(group_member) {
                    queue.push(group_member.clone());
                    seen.insert(group_member.clone());
                }
            }
        }
    }

    Ok(())
}
1022
1023fn apply_linked_dependencies(
1025 bump_by_pkg: &mut BTreeMap<String, Bump>,
1026 cfg: &Config,
1027 ws: &Workspace,
1028) -> Result<()> {
1029 let resolved_groups = resolve_config_groups(ws, &cfg.linked_dependencies, "packages.linked")?;
1030
1031 for group in &resolved_groups {
1032 let mut group_has_bumps = false;
1034 let mut highest_bump = Bump::Patch;
1035
1036 for group_member in group {
1038 if let Some(&member_bump) = bump_by_pkg.get(group_member) {
1039 group_has_bumps = true;
1040 if member_bump > highest_bump {
1041 highest_bump = member_bump;
1042 }
1043 }
1044 }
1045
1046 if group_has_bumps {
1048 for group_member in group {
1051 if bump_by_pkg.contains_key(group_member) {
1052 let current_bump = bump_by_pkg
1054 .get(group_member)
1055 .copied()
1056 .unwrap_or(Bump::Patch);
1057 if highest_bump > current_bump {
1058 bump_by_pkg.insert(group_member.clone(), highest_bump);
1059 }
1060 }
1061 }
1062 }
1063 }
1064
1065 Ok(())
1066}
1067
1068fn prepare_release_plan(
1070 bump_by_pkg: &BTreeMap<String, Bump>,
1071 ws: &Workspace,
1072) -> Result<ReleasePlan> {
1073 let mut by_id: BTreeMap<String, &PackageInfo> = BTreeMap::new();
1075 for c in &ws.members {
1076 if c.kind == PackageKind::Cargo {
1077 by_id.insert(c.canonical_identifier().to_string(), c);
1078 }
1079 }
1080
1081 let mut releases: Vec<(String, String, String)> = Vec::new(); for (identifier, bump) in bump_by_pkg {
1083 if let Some(info) = by_id.get(identifier) {
1084 let old = if info.version.is_empty() {
1085 "0.0.0".to_string()
1086 } else {
1087 info.version.clone()
1088 };
1089
1090 let newv = bump_version(&old, *bump).unwrap_or_else(|_| old.clone());
1091
1092 releases.push((identifier.clone(), old, newv));
1093 }
1094 }
1095
1096 Ok(releases)
1097}
1098
1099fn print_release_plan(workspace: &Workspace, releases: &ReleasePlan) {
1101 let include_kind = workspace.has_multiple_package_kinds();
1102 println!("Planned releases:");
1103 for (identifier, old, newv) in releases {
1104 let display = workspace
1105 .find_by_identifier(identifier)
1106 .map(|info| info.display_name(include_kind))
1107 .or_else(|| {
1108 PackageSpecifier::parse(identifier)
1109 .ok()
1110 .map(|spec| spec.display_name(include_kind))
1111 })
1112 .unwrap_or_else(|| identifier.clone());
1113 println!(" {display}: {old} -> {newv}");
1114 }
1115}
1116
/// Timezone used when rendering the release date in changelog headers.
#[derive(Debug, Clone, Copy)]
enum ReleaseDateTimezone {
    /// The machine's local timezone (the default).
    Local,
    /// Coordinated Universal Time.
    Utc,
    /// A fixed UTC offset such as `+02:00`.
    Offset(FixedOffset),
    /// A named IANA timezone such as `Europe/Paris`.
    Named(Tz),
}
1124
/// Parse the `changelog.release_date_timezone` configuration value.
///
/// Accepted forms: empty/`local` (machine timezone), `utc`/`z`, an IANA name
/// such as `Europe/Paris`, or a signed fixed offset written as `+HH:MM`,
/// `+HHMM`, or `+HH` (and the `-` equivalents). Anything else yields a
/// `SampoError::Config`.
fn parse_release_date_timezone(spec: &str) -> Result<ReleaseDateTimezone> {
    let trimmed = spec.trim();
    let invalid_value = || {
        SampoError::Config(format!(
            "Unsupported changelog.release_date_timezone value '{trimmed}'. Use 'UTC', 'local', a fixed offset like '+02:00', or an IANA timezone name such as 'Europe/Paris'."
        ))
    };
    if trimmed.is_empty() {
        return Ok(ReleaseDateTimezone::Local);
    }

    if trimmed.eq_ignore_ascii_case("local") {
        return Ok(ReleaseDateTimezone::Local);
    }

    if trimmed.eq_ignore_ascii_case("utc") || trimmed.eq_ignore_ascii_case("z") {
        return Ok(ReleaseDateTimezone::Utc);
    }

    // IANA names are tried before offsets; chrono_tz does the parsing.
    if let Ok(zone) = trimmed.parse::<Tz>() {
        return Ok(ReleaseDateTimezone::Named(zone));
    }

    // Everything left must be a signed offset: at least a sign and one digit.
    let bytes = trimmed.as_bytes();
    if bytes.len() < 2 {
        return Err(invalid_value());
    }

    let sign = match bytes[0] as char {
        '+' => 1,
        '-' => -1,
        // Any non-sign (including a non-ASCII first byte) lands here, so the
        // `&trimmed[1..]` slice below only runs after an ASCII sign.
        _ => return Err(invalid_value()),
    };

    let remainder = &trimmed[1..];
    if remainder.is_empty() {
        return Err(invalid_value());
    }

    // Split into hour/minute parts for the `HH:MM`, `HHMM`, and `HH` forms.
    let (hour_part, minute_part) = if let Some(idx) = remainder.find(':') {
        let (h, m) = remainder.split_at(idx);
        // `m` still starts with ':'; require at least one character after it.
        if m.len() < 2 {
            return Err(invalid_value());
        }
        (h, &m[1..])
    } else if remainder.len() == 4 {
        (&remainder[..2], &remainder[2..])
    } else if remainder.len() == 2 {
        (remainder, "00")
    } else {
        return Err(invalid_value());
    };

    let hours: u32 = hour_part.parse().map_err(|_| invalid_value())?;
    let minutes: u32 = minute_part.parse().map_err(|_| invalid_value())?;

    if hours > 23 || minutes > 59 {
        return Err(SampoError::Config(format!(
            "Unsupported changelog.release_date_timezone value '{trimmed}'. Hours must be <= 23 and minutes <= 59."
        )));
    }

    let total_seconds = (hours * 3600 + minutes * 60) as i32;
    // east_opt/west_opt return None for out-of-range offsets.
    let offset = if sign >= 0 {
        FixedOffset::east_opt(total_seconds)
    } else {
        FixedOffset::west_opt(total_seconds)
    };

    match offset {
        Some(value) => Ok(ReleaseDateTimezone::Offset(value)),
        None => Err(SampoError::Config(format!(
            "Unsupported changelog.release_date_timezone value '{trimmed}'. Offset is out of range."
        ))),
    }
}
1201
/// Compute the release-date string for changelog headers using the current
/// time; see `compute_release_date_display_with_now` for the rules.
fn compute_release_date_display(cfg: &Config) -> Result<Option<String>> {
    compute_release_date_display_with_now(cfg, Utc::now())
}
1205
1206fn compute_release_date_display_with_now(
1207 cfg: &Config,
1208 now: DateTime<Utc>,
1209) -> Result<Option<String>> {
1210 if !cfg.changelog_show_release_date {
1211 return Ok(None);
1212 }
1213
1214 let format_str = cfg.changelog_release_date_format.trim();
1215 if format_str.is_empty() {
1216 return Ok(None);
1217 }
1218
1219 let timezone_pref = cfg
1220 .changelog_release_date_timezone
1221 .as_deref()
1222 .map(str::trim)
1223 .filter(|s| !s.is_empty())
1224 .map(parse_release_date_timezone)
1225 .transpose()?;
1226
1227 let tz = timezone_pref.unwrap_or(ReleaseDateTimezone::Local);
1228
1229 let formatted = match tz {
1230 ReleaseDateTimezone::Local => now.with_timezone(&Local).format(format_str).to_string(),
1231 ReleaseDateTimezone::Utc => now.format(format_str).to_string(),
1232 ReleaseDateTimezone::Offset(offset) => {
1233 now.with_timezone(&offset).format(format_str).to_string()
1234 }
1235 ReleaseDateTimezone::Named(zone) => now.with_timezone(&zone).format(format_str).to_string(),
1236 };
1237
1238 Ok(Some(formatted))
1239}
1240
/// Write the computed release plan to disk: bump each Cargo crate's manifest
/// (including its references to other released workspace members) and update
/// its changelog with the collected messages.
///
/// `messages_by_pkg` is extended in place with the generated dependency /
/// fixed-group explanation entries before changelogs are written.
fn apply_releases(
    releases: &ReleasePlan,
    ws: &Workspace,
    messages_by_pkg: &mut BTreeMap<String, Vec<(String, Bump)>>,
    changesets: &[ChangesetInfo],
    cfg: &Config,
) -> Result<()> {
    // Cargo members indexed by canonical identifier.
    let mut by_id: BTreeMap<String, &PackageInfo> = BTreeMap::new();
    for c in &ws.members {
        if c.kind == PackageKind::Cargo {
            by_id.insert(c.canonical_identifier().to_string(), c);
        }
    }

    let manifest_metadata = ManifestMetadata::load(ws)?;

    // identifier -> (old, new), the shape the explanation detector expects.
    let releases_map: BTreeMap<String, (String, String)> = releases
        .iter()
        .map(|(name, old, new)| (name.clone(), (old.clone(), new.clone())))
        .collect();

    // Package *name* -> new version, used to rewrite dependency entries in
    // each manifest.
    let mut new_version_by_name: BTreeMap<String, String> = BTreeMap::new();
    for (identifier, _old, newv) in releases {
        if let Some(info) = by_id.get(identifier) {
            new_version_by_name.insert(info.name.clone(), newv.clone());
        }
    }

    let dependency_explanations =
        detect_all_dependency_explanations(changesets, ws, cfg, &releases_map)?;

    for (pkg_name, explanations) in dependency_explanations {
        messages_by_pkg
            .entry(pkg_name)
            .or_default()
            .extend(explanations);
    }

    let release_date_display = compute_release_date_display(cfg)?;

    let adapter = crate::adapters::PackageAdapter::Cargo;
    for (name, old, newv) in releases {
        // NOTE(review): the lookup is expected to succeed because the plan
        // was built from the same Cargo-member index; a miss here would be a
        // logic error upstream.
        let info = by_id.get(name.as_str()).unwrap();
        let manifest_path = adapter.manifest_path(&info.path);
        let text = fs::read_to_string(&manifest_path)?;

        // Rewrite the crate's own version plus any dependency versions that
        // point at other released members.
        let (updated, _dep_updates) = update_manifest_versions(
            &manifest_path,
            &text,
            Some(newv.as_str()),
            &new_version_by_name,
            Some(&manifest_metadata),
        )?;
        fs::write(&manifest_path, updated)?;

        let messages = messages_by_pkg.get(name).cloned().unwrap_or_default();
        update_changelog(
            &info.path,
            &info.name,
            old,
            newv,
            &messages,
            release_date_display.as_deref(),
        )?;
    }

    Ok(())
}
1316
/// Expand a possibly-partial semver string ("1", "1.2") into a full
/// "x.y.z" core by zero-padding, preserving any pre-release/build suffix
/// (everything from the first `-` or `+`) verbatim.
fn normalize_version_input(input: &str) -> std::result::Result<String, String> {
    let trimmed = input.trim();
    if trimmed.is_empty() {
        return Err("Version string cannot be empty".to_string());
    }

    // Separate the numeric core from the first `-`/`+` suffix, if any.
    let split_pos = trimmed
        .find(|ch: char| matches!(ch, '-' | '+'))
        .unwrap_or(trimmed.len());
    let (core, suffix) = trimmed.split_at(split_pos);

    let components: Vec<&str> = if core.is_empty() {
        Vec::new()
    } else {
        core.split('.').collect()
    };

    if components.is_empty() || components.len() > 3 {
        return Err(format!(
            "Invalid semantic version '{input}': expected one to three numeric components"
        ));
    }
    if components.iter().any(|component| component.is_empty()) {
        return Err(format!(
            "Invalid semantic version '{input}': found empty numeric component"
        ));
    }

    // Pad missing minor/patch components with zeros.
    let mut padded = components;
    while padded.len() < 3 {
        padded.push("0");
    }

    Ok(format!("{}{}", padded.join("."), suffix))
}
1356
1357pub(crate) fn parse_version_string(input: &str) -> std::result::Result<Version, String> {
1358 let normalized = normalize_version_input(input)?;
1359 Version::parse(&normalized).map_err(|err| format!("Invalid semantic version '{input}': {err}"))
1360}
1361
1362fn implied_prerelease_bump(version: &Version) -> std::result::Result<Bump, String> {
1363 if version.pre.is_empty() {
1364 return Err("Version does not contain a pre-release identifier".to_string());
1365 }
1366
1367 if version.minor == 0 && version.patch == 0 {
1368 Ok(Bump::Major)
1369 } else if version.patch == 0 {
1370 Ok(Bump::Minor)
1371 } else {
1372 Ok(Bump::Patch)
1373 }
1374}
1375
1376fn increment_prerelease(pre: &Prerelease) -> std::result::Result<Prerelease, String> {
1377 if pre.is_empty() {
1378 return Err("Pre-release identifier missing".to_string());
1379 }
1380
1381 let mut parts: Vec<String> = pre.as_str().split('.').map(|s| s.to_string()).collect();
1382 if parts.is_empty() {
1383 return Err("Pre-release identifier missing".to_string());
1384 }
1385
1386 let last_is_numeric = parts
1387 .last()
1388 .map(|part| part.chars().all(|ch| ch.is_ascii_digit()))
1389 .unwrap_or(false);
1390
1391 if last_is_numeric {
1392 let value = parts
1393 .last()
1394 .unwrap()
1395 .parse::<u64>()
1396 .map_err(|_| "Pre-release component is not a valid number".to_string())?;
1397 let incremented = value
1398 .checked_add(1)
1399 .ok_or_else(|| "Pre-release counter overflow".to_string())?;
1400 *parts.last_mut().unwrap() = incremented.to_string();
1401 } else {
1402 parts.push("1".to_string());
1403 }
1404
1405 let candidate = parts.join(".");
1406 Prerelease::new(&candidate).map_err(|err| format!("Invalid pre-release '{candidate}': {err}"))
1407}
1408
1409fn strip_trailing_numeric_identifiers(pre: &Prerelease) -> Option<Prerelease> {
1410 if pre.is_empty() {
1411 return None;
1412 }
1413
1414 let mut parts: Vec<&str> = pre.as_str().split('.').collect();
1415 while let Some(last) = parts.last() {
1416 if last.chars().all(|ch| ch.is_ascii_digit()) {
1417 parts.pop();
1418 } else {
1419 break;
1420 }
1421 }
1422
1423 if parts.is_empty() {
1424 None
1425 } else {
1426 let candidate = parts.join(".");
1427 Prerelease::new(&candidate).ok()
1428 }
1429}
1430
1431fn apply_base_bump(version: &mut Version, bump: Bump) -> std::result::Result<(), String> {
1432 match bump {
1433 Bump::Patch => {
1434 version.patch = version
1435 .patch
1436 .checked_add(1)
1437 .ok_or_else(|| "Patch component overflow".to_string())?;
1438 }
1439 Bump::Minor => {
1440 version.minor = version
1441 .minor
1442 .checked_add(1)
1443 .ok_or_else(|| "Minor component overflow".to_string())?;
1444 version.patch = 0;
1445 }
1446 Bump::Major => {
1447 version.major = version
1448 .major
1449 .checked_add(1)
1450 .ok_or_else(|| "Major component overflow".to_string())?;
1451 version.minor = 0;
1452 version.patch = 0;
1453 }
1454 }
1455 version.pre = Prerelease::EMPTY;
1456 version.build = BuildMetadata::EMPTY;
1457 Ok(())
1458}
1459
1460pub fn bump_version(old: &str, bump: Bump) -> std::result::Result<String, String> {
1462 let mut version = parse_version_string(old)?;
1463 let original_pre = version.pre.clone();
1464
1465 if original_pre.is_empty() {
1466 apply_base_bump(&mut version, bump)?;
1467 return Ok(version.to_string());
1468 }
1469
1470 let implied = implied_prerelease_bump(&version)?;
1471
1472 if bump <= implied {
1473 version.pre = increment_prerelease(&original_pre)?;
1474 version.build = BuildMetadata::EMPTY;
1475 Ok(version.to_string())
1476 } else {
1477 let base_pre = strip_trailing_numeric_identifiers(&original_pre).ok_or_else(|| {
1478 format!(
1479 "Pre-release version '{old}' must include a non-numeric identifier before the counter"
1480 )
1481 })?;
1482
1483 apply_base_bump(&mut version, bump)?;
1484 version.pre = base_pre;
1485 Ok(version.to_string())
1486 }
1487}
1488
/// Split a changelog body into `(intro, versions)`: the versions part begins
/// at the first line that starts with `"## "`. When no such line exists the
/// whole body is the intro.
fn split_intro_and_versions(body: &str) -> (&str, &str) {
    let mut line_start = 0;
    for line in body.split_inclusive('\n') {
        if line.starts_with("## ") {
            return body.split_at(line_start);
        }
        line_start += line.len();
    }
    (body, "")
}
1507
/// True when a changelog `## ` header text refers to `version`: either an
/// exact match, or the version followed (after optional whitespace) by a
/// dash-separated suffix such as a release date.
fn header_matches_release_version(header_text: &str, version: &str) -> bool {
    let Some(rest) = header_text.strip_prefix(version) else {
        return false;
    };
    let suffix = rest.trim_start();
    suffix.is_empty() || suffix.starts_with('—') || suffix.starts_with('-')
}
1521
/// Prepend a `## <new_version>` section to the package's CHANGELOG.md.
///
/// Steps, in order:
/// - Read the existing changelog (empty when the file does not exist) and
///   split it into an intro and the version sections.
/// - Synthesize a `# <package>` intro when none exists.
/// - Bucket `entries` into major/minor/patch lists, de-duplicating messages.
/// - If the top existing section does NOT match `old_version` (i.e. it was
///   written but never published), absorb its bullet points into the new
///   lists and drop that section, so re-running before publishing merges
///   rather than stacks sections.
/// - Write intro + new section + remaining published sections back to disk.
///
/// `release_date_display`, when non-blank, is appended to the header as
/// `## <version> — <date>`.
fn update_changelog(
    crate_dir: &Path,
    package: &str,
    old_version: &str,
    new_version: &str,
    entries: &[(String, Bump)],
    release_date_display: Option<&str>,
) -> Result<()> {
    let path = crate_dir.join("CHANGELOG.md");
    let existing = if path.exists() {
        fs::read_to_string(&path)?
    } else {
        String::new()
    };
    // Strip a UTF-8 BOM so "## " detection works on the very first line.
    let cleaned = existing.trim_start_matches('\u{feff}');
    let (intro_part, versions_part) = split_intro_and_versions(cleaned);
    let mut intro = intro_part.to_string();
    let mut versions_body = versions_part.to_string();

    // No intro at all: fall back to a default "# <package>" header.
    if intro.trim().is_empty() {
        intro = format!("# {}\n\n", package);
    }

    // New entries bucketed by bump level, preserving insertion order.
    let mut merged_major: Vec<String> = Vec::new();
    let mut merged_minor: Vec<String> = Vec::new();
    let mut merged_patch: Vec<String> = Vec::new();

    // Append only if the exact message is not already in the list.
    let push_unique = |list: &mut Vec<String>, msg: &str| {
        if !list.iter().any(|m| m == msg) {
            list.push(msg.to_string());
        }
    };

    for (msg, bump) in entries {
        match bump {
            Bump::Major => push_unique(&mut merged_major, msg),
            Bump::Minor => push_unique(&mut merged_minor, msg),
            Bump::Patch => push_unique(&mut merged_patch, msg),
        }
    }

    let trimmed = versions_body.trim_start();
    if trimmed.starts_with("## ") {
        let mut lines_iter = trimmed.lines();
        let header_line = lines_iter.next().unwrap_or("").trim();
        let header_text = header_line.trim_start_matches("## ").trim();

        // The top section is "published" when its header names old_version;
        // anything else is an unpublished draft to be merged and replaced.
        let is_published_top = header_matches_release_version(header_text, old_version);

        if !is_published_top {
            // Offsets below index into `trimmed`; `find` on the tail returns
            // positions relative to `rest_after_header`, so the sum is the
            // absolute position of the next "\n## " boundary.
            let after_header_offset = header_line.len();
            let rest_after_header = &trimmed[after_header_offset..];
            let next_rel = rest_after_header.find("\n## ");
            let (section_text, remaining) = match next_rel {
                Some(pos) => {
                    // Keep the trailing newline with the absorbed section.
                    let end = after_header_offset + pos + 1;
                    (&trimmed[..end], &trimmed[end..])
                }
                None => (trimmed, ""),
            };

            // Walk the draft section, tracking which "### … changes"
            // subsection each bullet belongs to, and absorb its bullets.
            let mut current = None::<&str>;
            for line in section_text.lines() {
                let t = line.trim();
                if t.eq_ignore_ascii_case("### Major changes") {
                    current = Some("major");
                    continue;
                } else if t.eq_ignore_ascii_case("### Minor changes") {
                    current = Some("minor");
                    continue;
                } else if t.eq_ignore_ascii_case("### Patch changes") {
                    current = Some("patch");
                    continue;
                }
                if t.starts_with("- ") {
                    let msg = t.trim_start_matches("- ").trim();
                    match current {
                        Some("major") => push_unique(&mut merged_major, msg),
                        Some("minor") => push_unique(&mut merged_minor, msg),
                        Some("patch") => push_unique(&mut merged_patch, msg),
                        // Bullets outside a recognized subsection are dropped.
                        _ => {}
                    }
                }
            }

            // The absorbed draft section is removed from the body.
            versions_body = remaining.to_string();
        }
    }

    // Build the new top section: header (with optional date) + subsections.
    let mut section = String::new();
    match release_date_display.and_then(|d| (!d.trim().is_empty()).then_some(d)) {
        Some(date) => section.push_str(&format!("## {new_version} — {date}\n\n")),
        None => section.push_str(&format!("## {new_version}\n\n")),
    }

    if !merged_major.is_empty() {
        section.push_str("### Major changes\n\n");
        for msg in &merged_major {
            section.push_str(&crate::markdown::format_markdown_list_item(msg));
        }
        section.push('\n');
    }
    if !merged_minor.is_empty() {
        section.push_str("### Minor changes\n\n");
        for msg in &merged_minor {
            section.push_str(&crate::markdown::format_markdown_list_item(msg));
        }
        section.push('\n');
    }
    if !merged_patch.is_empty() {
        section.push_str("### Patch changes\n\n");
        for msg in &merged_patch {
            section.push_str(&crate::markdown::format_markdown_list_item(msg));
        }
        section.push('\n');
    }

    // Reassemble: intro, blank line, new section, blank line, older sections.
    let mut combined = String::new();
    combined.push_str(&intro);

    if !combined.is_empty() && !combined.ends_with("\n\n") {
        if combined.ends_with('\n') {
            combined.push('\n');
        } else {
            combined.push_str("\n\n");
        }
    }

    combined.push_str(&section);

    if !versions_body.trim().is_empty() {
        if !combined.ends_with("\n\n") {
            if combined.ends_with('\n') {
                combined.push('\n');
            } else {
                combined.push_str("\n\n");
            }
        }
        combined.push_str(&versions_body);
    }

    fs::write(&path, combined)?;
    Ok(())
}
1678
1679fn validate_fixed_dependencies(config: &Config, workspace: &Workspace) -> Result<()> {
1681 resolve_config_groups(workspace, &config.fixed_dependencies, "packages.fixed")?;
1682 Ok(())
1683}
1684
1685#[cfg(test)]
1686mod tests {
1687 use super::*;
1688 use chrono::TimeZone;
1689 use std::collections::BTreeMap;
1690
    #[test]
    fn preserves_changelog_intro_when_updating() {
        use std::fs;
        use tempfile::tempdir;

        // Seed a changelog with a custom intro and one published section.
        let temp = tempdir().unwrap();
        let crate_dir = temp.path();
        let intro = "# Custom Changelog Header\n\nIntro text before versions.\n\n";
        let existing = format!(
            "{}## 1.0.0 — 2024-06-19\n\n### Patch changes\n\n- Existing entry\n",
            intro
        );
        fs::write(crate_dir.join("CHANGELOG.md"), existing).unwrap();

        let entries = vec![("Add new feature".to_string(), Bump::Minor)];
        update_changelog(
            crate_dir,
            "my-package",
            "1.0.0",
            "1.0.1",
            &entries,
            Some("2024-06-20"),
        )
        .unwrap();

        // The intro must survive, the new section must sit above the old
        // one, and both old and new entries must still be present.
        let updated = fs::read_to_string(crate_dir.join("CHANGELOG.md")).unwrap();
        assert!(updated.starts_with(intro));

        let new_idx = updated.find("## 1.0.1").unwrap();
        let old_idx = updated.find("## 1.0.0").unwrap();
        assert!(new_idx >= intro.len());
        assert!(new_idx < old_idx);
        assert!(updated.contains("## 1.0.1 — 2024-06-20"));
        assert!(updated.contains("- Add new feature"));
        assert!(updated.contains("- Existing entry"));
    }
1727
    #[test]
    fn creates_default_header_when_missing_intro() {
        use std::fs;
        use tempfile::tempdir;

        // No CHANGELOG.md exists yet in this crate directory.
        let temp = tempdir().unwrap();
        let crate_dir = temp.path();

        let entries = vec![("Initial release".to_string(), Bump::Major)];
        update_changelog(crate_dir, "new-package", "0.1.0", "1.0.0", &entries, None).unwrap();

        // A "# <package>" intro is synthesized above the first section.
        let updated = fs::read_to_string(crate_dir.join("CHANGELOG.md")).unwrap();
        assert!(updated.starts_with("# new-package\n\n## 1.0.0"));
    }
1742
    #[test]
    fn header_matches_release_version_handles_suffixes() {
        // Exact match, em-dash date suffix, and plain-dash suffix all count
        // as the same released version; a different version does not.
        assert!(header_matches_release_version("1.0.0", "1.0.0"));
        assert!(header_matches_release_version(
            "1.0.0 — 2024-06-20",
            "1.0.0"
        ));
        assert!(header_matches_release_version("1.0.0-2024-06-20", "1.0.0"));
        assert!(!header_matches_release_version(
            "1.0.1 — 2024-06-20",
            "1.0.0"
        ));
    }
1756
    #[test]
    fn update_changelog_skips_blank_release_date() {
        use std::fs;
        use tempfile::tempdir;

        let temp = tempdir().unwrap();
        let crate_dir = temp.path();
        let entries = vec![("Bug fix".to_string(), Bump::Patch)];

        // A whitespace-only date must behave like None: no "—" suffix.
        update_changelog(
            crate_dir,
            "blank-date",
            "0.1.0",
            "0.1.1",
            &entries,
            Some("   "),
        )
        .unwrap();

        let updated = fs::read_to_string(crate_dir.join("CHANGELOG.md")).unwrap();
        assert!(updated.contains("## 0.1.1\n"));
        assert!(!updated.contains("—"));
    }
1780
    #[test]
    fn parse_release_date_timezone_accepts_utc() {
        // The literal "UTC" maps to the dedicated Utc variant.
        match parse_release_date_timezone("UTC").unwrap() {
            ReleaseDateTimezone::Utc => {}
            _ => panic!("Expected UTC timezone"),
        }
    }
1788
    #[test]
    fn parse_release_date_timezone_accepts_offset() {
        // "+05:45" parses into a positive fixed offset of 5h45m from UTC.
        match parse_release_date_timezone("+05:45").unwrap() {
            ReleaseDateTimezone::Offset(offset) => {
                assert_eq!(offset.local_minus_utc(), 5 * 3600 + 45 * 60);
            }
            _ => panic!("Expected fixed offset"),
        }
    }
1798
    #[test]
    fn parse_release_date_timezone_rejects_invalid() {
        // An unknown IANA-style name fails with a config error that names
        // the offending setting.
        let err = parse_release_date_timezone("Not/AZone").unwrap_err();
        let msg = err.to_string();
        assert!(msg.contains("release_date_timezone"));
    }
1805
    #[test]
    fn compute_release_date_display_uses_utc() {
        // %Z renders the timezone abbreviation, so a UTC config yields "UTC".
        let cfg = Config {
            changelog_release_date_format: "%Z".to_string(),
            changelog_release_date_timezone: Some("UTC".to_string()),
            ..Default::default()
        };

        let now = Utc.with_ymd_and_hms(2024, 1, 15, 12, 0, 0).unwrap();
        let display = compute_release_date_display_with_now(&cfg, now)
            .unwrap()
            .unwrap();
        assert_eq!(display, "UTC");
    }
1820
    #[test]
    fn parse_release_date_timezone_accepts_named_zone() {
        // A valid IANA name resolves to the matching chrono-tz zone.
        match parse_release_date_timezone("Europe/Paris").unwrap() {
            ReleaseDateTimezone::Named(zone) => {
                assert_eq!(zone, chrono_tz::Europe::Paris);
            }
            _ => panic!("Expected named timezone"),
        }
    }
1830
    #[test]
    fn compute_release_date_display_uses_offset() {
        // %z renders the numeric offset; "-03:30" formats as "-0330".
        let cfg = Config {
            changelog_release_date_format: "%z".to_string(),
            changelog_release_date_timezone: Some("-03:30".to_string()),
            ..Default::default()
        };

        let now = Utc.with_ymd_and_hms(2024, 6, 1, 12, 0, 0).unwrap();
        let display = compute_release_date_display_with_now(&cfg, now)
            .unwrap()
            .unwrap();
        assert_eq!(display, "-0330");
    }
1845
    #[test]
    fn compute_release_date_display_uses_named_zone() {
        // January in New York is standard time, so %Z renders "EST".
        let cfg = Config {
            changelog_release_date_format: "%Z".to_string(),
            changelog_release_date_timezone: Some("America/New_York".to_string()),
            ..Default::default()
        };

        let now = Utc.with_ymd_and_hms(2024, 1, 15, 12, 0, 0).unwrap();
        let display = compute_release_date_display_with_now(&cfg, now)
            .unwrap()
            .unwrap();
        assert_eq!(display, "EST");
    }
1860
    #[test]
    fn test_ignore_packages_in_dependency_cascade() {
        use crate::types::{PackageInfo, PackageKind, Workspace};
        use std::path::PathBuf;

        // Workspace with one main package and two members living under
        // paths that the config below marks as ignored.
        let root = PathBuf::from("/tmp/test");
        let workspace = Workspace {
            root: root.clone(),
            members: vec![
                PackageInfo {
                    name: "main-package".to_string(),
                    identifier: "cargo/main-package".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("main-package"),
                    internal_deps: BTreeSet::new(),
                    kind: PackageKind::Cargo,
                },
                PackageInfo {
                    name: "examples-package".to_string(),
                    identifier: "cargo/examples-package".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("examples/package"),
                    internal_deps: BTreeSet::new(),
                    kind: PackageKind::Cargo,
                },
                PackageInfo {
                    name: "benchmarks-utils".to_string(),
                    identifier: "cargo/benchmarks-utils".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("benchmarks/utils"),
                    internal_deps: BTreeSet::new(),
                    kind: PackageKind::Cargo,
                },
            ],
        };

        // Ignore everything under examples/ and benchmarks/.
        let config = Config {
            ignore: vec!["examples/*".to_string(), "benchmarks/*".to_string()],
            ..Default::default()
        };

        // Both ignored packages depend on main-package.
        let mut dependents = BTreeMap::new();
        dependents.insert(
            "cargo/main-package".to_string(),
            ["cargo/examples-package", "cargo/benchmarks-utils"]
                .iter()
                .map(|s| s.to_string())
                .collect(),
        );

        let mut bump_by_pkg = BTreeMap::new();
        bump_by_pkg.insert("cargo/main-package".to_string(), Bump::Minor);

        apply_dependency_cascade(&mut bump_by_pkg, &dependents, &config, &workspace).unwrap();

        // The cascade must not add bumps for ignored dependents.
        assert_eq!(bump_by_pkg.len(), 1);
        assert!(bump_by_pkg.contains_key("cargo/main-package"));
        assert!(!bump_by_pkg.contains_key("cargo/examples-package"));
        assert!(!bump_by_pkg.contains_key("cargo/benchmarks-utils"));
    }
1927
    #[test]
    fn test_ignored_packages_excluded_from_dependency_graph() {
        use crate::types::{PackageInfo, PackageKind, Workspace};
        use std::collections::BTreeSet;
        use std::path::PathBuf;

        // main-package's only internal dependency lives under examples/,
        // which the config below ignores.
        let root = PathBuf::from("/tmp/test");
        let workspace = Workspace {
            root: root.clone(),
            members: vec![
                PackageInfo {
                    name: "main-package".to_string(),
                    identifier: "cargo/main-package".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("main-package"),
                    internal_deps: ["cargo/examples-package".to_string()].into_iter().collect(),
                    kind: PackageKind::Cargo,
                },
                PackageInfo {
                    name: "examples-package".to_string(),
                    identifier: "cargo/examples-package".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("examples/package"),
                    internal_deps: BTreeSet::new(),
                    kind: PackageKind::Cargo,
                },
            ],
        };

        let config = Config {
            ignore: vec!["examples/*".to_string()],
            ..Default::default()
        };

        let dependents = build_dependency_graph(&workspace, &config);

        // The ignored package contributes no edges...
        assert!(!dependents.contains_key("cargo/examples-package"));

        // ...and since it was the only dependency, the graph is empty.
        assert!(dependents.is_empty());
    }
1974}