1use crate::adapters::{ManifestMetadata, PackageAdapter};
2use crate::errors::{Result, SampoError, io_error_with_path};
3use crate::filters::should_ignore_package;
4use crate::types::{
5 Bump, DependencyUpdate, PackageInfo, PackageKind, PackageSpecifier, ReleaseOutput,
6 ReleasedPackage, SpecResolution, Workspace, format_ambiguity_options,
7};
8use crate::{
9 changeset::ChangesetInfo, config::Config, current_branch, detect_github_repo_slug_with_config,
10 discover_workspace, enrich_changeset_message, get_commit_hash_for_path, load_changesets,
11};
12use chrono::{DateTime, FixedOffset, Local, Utc};
13use chrono_tz::Tz;
14use semver::{BuildMetadata, Prerelease, Version};
15use std::collections::{BTreeMap, BTreeSet};
16use std::ffi::OsStr;
17use std::fs;
18use std::path::{Path, PathBuf};
19
/// Formats a human-readable "Updated dependencies: …" changelog message.
///
/// Returns `None` when `updates` is empty. Each dependency renders as
/// `name@version`. When the same base name appears under more than one
/// ecosystem label (e.g. `cargo/foo` and `npm/foo`), every occurrence keeps
/// its kind/prefix qualifier so the entry stays unambiguous; otherwise the
/// plain display name is used.
pub fn format_dependency_updates_message(updates: &[DependencyUpdate]) -> Option<String> {
    if updates.is_empty() {
        return None;
    }

    // Per update: parsed specifier (if parseable), raw "prefix" text (if the
    // name looked like "prefix/name" but failed to parse), the base package
    // name, and the original record.
    let mut parsed_updates: Vec<(
        Option<PackageSpecifier>,
        Option<String>,
        String,
        &DependencyUpdate,
    )> = Vec::with_capacity(updates.len());
    // base name -> set of kind/prefix labels observed for that name; used to
    // detect cross-ecosystem name collisions.
    let mut labels_by_name: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();

    for dep in updates {
        if let Ok(spec) = PackageSpecifier::parse(&dep.name) {
            let base_name = spec.name.clone();
            if let Some(kind) = spec.kind {
                labels_by_name
                    .entry(base_name.clone())
                    .or_default()
                    .insert(kind.as_str().to_string());
            } else {
                // No kind: still create the entry so the name participates in
                // ambiguity detection.
                labels_by_name.entry(base_name.clone()).or_default();
            }
            parsed_updates.push((Some(spec), None, base_name, dep));
        } else if let Some((prefix, name)) = dep.name.split_once('/') {
            // Unparseable specifier that still looks prefixed: treat the text
            // before '/' as a raw ecosystem label (normalized to lowercase).
            let base_name = name.to_string();
            labels_by_name
                .entry(base_name.clone())
                .or_default()
                .insert(prefix.to_ascii_lowercase());
            parsed_updates.push((None, Some(prefix.to_string()), base_name, dep));
        } else {
            // Plain name with no recognizable prefix.
            let base_name = dep.name.clone();
            labels_by_name.entry(base_name.clone()).or_default();
            parsed_updates.push((None, None, base_name, dep));
        }
    }

    // A base name is ambiguous when it was seen with more than one label.
    let ambiguous_names: BTreeSet<String> = labels_by_name
        .iter()
        .filter_map(|(name, labels)| {
            if labels.len() > 1 {
                Some(name.clone())
            } else {
                None
            }
        })
        .collect();

    let dep_list = parsed_updates
        .into_iter()
        .map(|(spec_opt, raw_prefix, base_name, dep)| {
            let is_ambiguous = ambiguous_names.contains(&base_name);
            // Include the kind/prefix qualifier only when needed to
            // disambiguate; otherwise show the plain display name.
            let display_label = if let Some(spec) = spec_opt.as_ref() {
                if let Some(kind) = spec.kind {
                    if is_ambiguous {
                        format!("{}/{}", kind.as_str(), spec.name)
                    } else {
                        spec.display_name(false)
                    }
                } else {
                    spec.display_name(false)
                }
            } else if let Some(prefix) = raw_prefix.as_ref() {
                if is_ambiguous {
                    format!("{}/{}", prefix.to_ascii_lowercase(), base_name)
                } else {
                    base_name.clone()
                }
            } else {
                base_name.clone()
            };
            format!("{display_label}@{}", dep.new_version)
        })
        .collect::<Vec<_>>()
        .join(", ");

    Some(format!("Updated dependencies: {}", dep_list))
}
104
105pub fn build_dependency_updates(updates: &[(String, String)]) -> Vec<DependencyUpdate> {
107 updates
108 .iter()
109 .map(|(name, version)| DependencyUpdate {
110 name: name.clone(),
111 new_version: version.clone(),
112 })
113 .collect()
114}
115
116fn resolve_package_spec<'a>(
117 workspace: &'a Workspace,
118 spec: &PackageSpecifier,
119) -> Result<&'a PackageInfo> {
120 match workspace.resolve_specifier(spec) {
121 SpecResolution::Match(info) => Ok(info),
122 SpecResolution::NotFound { query } => match query.identifier() {
123 Some(identifier) => Err(SampoError::Changeset(format!(
124 "Changeset references '{}', but it was not found in the workspace.",
125 identifier
126 ))),
127 None => Err(SampoError::Changeset(format!(
128 "Changeset references '{}', but no matching package exists in the workspace.",
129 query.base_name()
130 ))),
131 },
132 SpecResolution::Ambiguous { query, matches } => {
133 let options = format_ambiguity_options(&matches);
134 Err(SampoError::Changeset(format!(
135 "Changeset references '{}', which matches multiple packages. \
136 Disambiguate using one of: {}.",
137 query.base_name(),
138 options
139 )))
140 }
141 }
142}
143
144fn resolve_config_value(workspace: &Workspace, value: &str, context: &str) -> Result<String> {
145 let spec = PackageSpecifier::parse(value).map_err(|reason| {
146 SampoError::Config(format!(
147 "{}: invalid package reference '{}': {}",
148 context, value, reason
149 ))
150 })?;
151
152 match workspace.resolve_specifier(&spec) {
153 SpecResolution::Match(info) => Ok(info.canonical_identifier().to_string()),
154 SpecResolution::NotFound { query } => Err(SampoError::Config(format!(
155 "{}: package '{}' not found in the workspace.",
156 context,
157 query.display()
158 ))),
159 SpecResolution::Ambiguous { query, matches } => {
160 let options = format_ambiguity_options(&matches);
161 Err(SampoError::Config(format!(
162 "{}: package '{}' is ambiguous. Use one of: {}.",
163 context,
164 query.base_name(),
165 options
166 )))
167 }
168 }
169}
170
171fn resolve_config_groups(
172 workspace: &Workspace,
173 groups: &[Vec<String>],
174 section: &str,
175) -> Result<Vec<Vec<String>>> {
176 let mut resolved = Vec::with_capacity(groups.len());
177 for (idx, group) in groups.iter().enumerate() {
178 let mut resolved_group = Vec::with_capacity(group.len());
179 let context = format!("{} group {}", section, idx + 1);
180 for value in group {
181 let identifier = resolve_config_value(workspace, value, &context)?;
182 resolved_group.push(identifier);
183 }
184 resolved.push(resolved_group);
185 }
186 Ok(resolved)
187}
188
189pub fn create_dependency_update_entry(updates: &[DependencyUpdate]) -> Option<(String, Bump)> {
193 format_dependency_updates_message(updates).map(|msg| (msg, Bump::Patch))
194}
195
196pub fn create_fixed_dependency_policy_entry(bump: Bump) -> (String, Bump) {
200 (
201 "Bumped due to fixed dependency group policy".to_string(),
202 bump,
203 )
204}
205
206pub fn infer_bump_from_versions(old_ver: &str, new_ver: &str) -> Bump {
211 let old_parts: Vec<u32> = old_ver.split('.').filter_map(|s| s.parse().ok()).collect();
212 let new_parts: Vec<u32> = new_ver.split('.').filter_map(|s| s.parse().ok()).collect();
213
214 if old_parts.len() >= 3 && new_parts.len() >= 3 {
215 if new_parts[0] > old_parts[0] {
216 Bump::Major
217 } else if new_parts[1] > old_parts[1] {
218 Bump::Minor
219 } else {
220 Bump::Patch
221 }
222 } else {
223 Bump::Patch
224 }
225}
226
/// Collects the auto-generated changelog entries that explain why packages
/// are being released without a changeset of their own.
///
/// Two kinds of entries are produced, keyed by canonical package identifier:
/// - "fixed dependency group policy" entries for packages bumped purely by
///   group policy (the bump level is inferred from the actual old/new
///   versions in `releases` when available), and
/// - "Updated dependencies: …" entries listing each released internal
///   dependency of a released package.
///
/// `releases` maps canonical identifier -> (old_version, new_version).
pub fn detect_all_dependency_explanations(
    changesets: &[ChangesetInfo],
    workspace: &Workspace,
    config: &Config,
    releases: &BTreeMap<String, (String, String)>,
) -> Result<BTreeMap<String, Vec<(String, Bump)>>> {
    let mut messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>> = BTreeMap::new();
    // Show ecosystem prefixes only when the workspace mixes package kinds.
    let include_kind = workspace.has_multiple_package_kinds();

    let bumped_packages: BTreeSet<String> = releases.keys().cloned().collect();
    let policy_packages =
        detect_fixed_dependency_policy_packages(changesets, workspace, config, &bumped_packages)?;

    for (pkg_name, policy_bump) in policy_packages {
        // Prefer the bump implied by the real version change; fall back to
        // the bump derived from the group's changesets.
        let actual_bump = if let Some((old_ver, new_ver)) = releases.get(&pkg_name) {
            infer_bump_from_versions(old_ver, new_ver)
        } else {
            policy_bump
        };

        let (msg, bump_type) = create_fixed_dependency_policy_entry(actual_bump);
        messages_by_pkg
            .entry(pkg_name)
            .or_default()
            .push((msg, bump_type));
    }

    // identifier -> new version, for fast dependency lookups below.
    let new_version_by_name: BTreeMap<String, String> = releases
        .iter()
        .map(|(name, (_old, new_ver))| (name.clone(), new_ver.clone()))
        .collect();

    // Non-ignored workspace members, keyed by canonical identifier.
    let by_id: BTreeMap<String, &PackageInfo> = workspace
        .members
        .iter()
        .filter(|c| !should_ignore_package(config, workspace, c).unwrap_or(false))
        .map(|c| (c.canonical_identifier().to_string(), c))
        .collect();

    for crate_id in releases.keys() {
        if let Some(crate_info) = by_id.get(crate_id) {
            // Internal dependencies of this package that are also released.
            let mut updated_deps = Vec::new();
            for dep_name in &crate_info.internal_deps {
                if let Some(new_version) = new_version_by_name.get(dep_name as &str) {
                    // Best available display name: workspace info, then a
                    // parsed specifier, then the raw identifier.
                    let display_dep = by_id
                        .get(dep_name)
                        .map(|info| info.display_name(include_kind))
                        .or_else(|| {
                            PackageSpecifier::parse(dep_name)
                                .ok()
                                .map(|spec| spec.display_name(include_kind))
                        })
                        .unwrap_or_else(|| dep_name.clone());
                    updated_deps.push((display_dep, new_version.clone()));
                }
            }

            if !updated_deps.is_empty() {
                let updates = build_dependency_updates(&updated_deps);
                if let Some((msg, bump)) = create_dependency_update_entry(&updates) {
                    messages_by_pkg
                        .entry(crate_id.clone())
                        .or_default()
                        .push((msg, bump));
                }
            }
        }
    }

    Ok(messages_by_pkg)
}
324
/// Identifies packages bumped *only* because of the `packages.fixed` group
/// policy, and the bump level each should receive.
///
/// A bumped package qualifies when it has no changeset of its own, is not a
/// (transitive) dependent of any package with a changeset, and shares a
/// fixed group with at least one affected member. The resulting bump is the
/// highest bump declared in changesets for any member of the first matching
/// group (defaulting to patch).
pub fn detect_fixed_dependency_policy_packages(
    changesets: &[ChangesetInfo],
    workspace: &Workspace,
    config: &Config,
    bumped_packages: &BTreeSet<String>,
) -> Result<BTreeMap<String, Bump>> {
    // Canonical identifiers of packages explicitly named by a changeset.
    let mut packages_with_changesets: BTreeSet<String> = BTreeSet::new();
    for cs in changesets {
        for (spec, _) in &cs.entries {
            let info = resolve_package_spec(workspace, spec)?;
            packages_with_changesets.insert(info.canonical_identifier().to_string());
        }
    }

    let resolved_groups =
        resolve_config_groups(workspace, &config.fixed_dependencies, "packages.fixed")?;

    // Reverse dependency map: dependency identifier -> dependent identifiers.
    // Ignored packages contribute no edges.
    let mut dependents: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    for crate_info in &workspace.members {
        if should_ignore_package(config, workspace, crate_info).unwrap_or(false) {
            continue;
        }

        for dep_name in &crate_info.internal_deps {
            dependents
                .entry(dep_name.clone())
                .or_default()
                .insert(crate_info.canonical_identifier().to_string());
        }
    }

    // Everything reachable as a transitive dependent of a package with a
    // changeset; such packages are bumped by the normal cascade, not by group
    // policy. Traversal is a simple DFS with a per-start visited set.
    let mut packages_affected_by_cascade = BTreeSet::new();
    for pkg_with_changeset in &packages_with_changesets {
        let mut queue = vec![pkg_with_changeset.clone()];
        let mut visited = BTreeSet::new();

        while let Some(pkg) = queue.pop() {
            if visited.contains(&pkg) {
                continue;
            }
            visited.insert(pkg.clone());

            if let Some(deps) = dependents.get(&pkg) {
                for dep in deps {
                    packages_affected_by_cascade.insert(dep.clone());
                    queue.push(dep.clone());
                }
            }
        }
    }

    let mut result = BTreeMap::new();

    for pkg_name in bumped_packages {
        // Has its own changeset: the bump is already explained.
        if packages_with_changesets.contains(pkg_name) {
            continue;
        }

        // Bumped via the dependency cascade: explained elsewhere.
        if packages_affected_by_cascade.contains(pkg_name) {
            continue;
        }

        for group in &resolved_groups {
            if !group.contains(pkg_name) {
                continue;
            }

            // The group must contain at least one *other* member that is
            // actually affected; otherwise this bump has some other cause.
            let has_affected_group_member = group.iter().any(|member_id| {
                member_id != pkg_name
                    && (packages_with_changesets.contains(member_id)
                        || packages_affected_by_cascade.contains(member_id))
            });

            if !has_affected_group_member {
                continue;
            }

            // Highest bump declared in any changeset for any group member.
            // NOTE(review): `find_map` takes only the first matching entry in
            // each changeset for a member — assumes a changeset lists a given
            // package at most once; confirm against the changeset format.
            let group_bump = group
                .iter()
                .filter_map(|member_id| {
                    if !packages_with_changesets.contains(member_id) {
                        return None;
                    }
                    changesets
                        .iter()
                        .filter_map(|cs| {
                            cs.entries.iter().find_map(|(spec, bump)| {
                                let info = resolve_package_spec(workspace, spec).ok()?;
                                if info.canonical_identifier() == member_id.as_str() {
                                    Some(*bump)
                                } else {
                                    None
                                }
                            })
                        })
                        .max()
                })
                .max()
                .unwrap_or(Bump::Patch);

            result.insert(pkg_name.clone(), group_bump);
            // First matching group wins.
            break;
        }
    }

    Ok(result)
}
446
447type InitialBumpsResult = (
449 BTreeMap<String, Bump>, BTreeMap<String, Vec<(String, Bump)>>, BTreeSet<std::path::PathBuf>, );
453
454type ReleasePlan = Vec<(String, String, String)>; struct PlanState {
459 messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>>,
460 used_paths: BTreeSet<PathBuf>,
461 releases: ReleasePlan,
462 released_packages: Vec<ReleasedPackage>,
463}
464
465enum PlanOutcome {
467 NoApplicablePackages,
468 NoMatchingCrates,
469 Plan(PlanState),
470}
471
472pub fn run_release(root: &std::path::Path, dry_run: bool) -> Result<ReleaseOutput> {
474 let workspace = discover_workspace(root)?;
475 let config = Config::load(&workspace.root)?;
476
477 let branch = current_branch()?;
478 if !config.is_release_branch(&branch) {
479 return Err(SampoError::Release(format!(
480 "Branch '{}' is not configured for releases (allowed: {:?})",
481 branch,
482 config.release_branches().into_iter().collect::<Vec<_>>()
483 )));
484 }
485
486 validate_fixed_dependencies(&config, &workspace)?;
488
489 let changesets_dir = workspace.root.join(".sampo").join("changesets");
490 let prerelease_dir = workspace.root.join(".sampo").join("prerelease");
491
492 let current_changesets = load_changesets(&changesets_dir)?;
493 let preserved_changesets = load_changesets(&prerelease_dir)?;
494
495 let mut using_preserved = false;
496 let mut cached_plan_state: Option<PlanState> = None;
497
498 if current_changesets.is_empty() {
499 if preserved_changesets.is_empty() {
500 println!(
501 "No changesets found in {}",
502 workspace.root.join(".sampo").join("changesets").display()
503 );
504 return Ok(ReleaseOutput {
505 released_packages: vec![],
506 dry_run,
507 });
508 }
509 using_preserved = true;
510 } else {
511 match compute_plan_state(¤t_changesets, &workspace, &config)? {
512 PlanOutcome::Plan(plan) => {
513 let is_prerelease_preview = releases_include_prerelease(&plan.releases);
514 if !is_prerelease_preview && !preserved_changesets.is_empty() {
515 using_preserved = true;
516 } else {
517 cached_plan_state = Some(plan);
518 }
519 }
520 PlanOutcome::NoApplicablePackages => {
521 if preserved_changesets.is_empty() {
522 println!("No applicable packages found in changesets.");
523 return Ok(ReleaseOutput {
524 released_packages: vec![],
525 dry_run,
526 });
527 }
528 using_preserved = true;
529 }
530 PlanOutcome::NoMatchingCrates => {
531 if preserved_changesets.is_empty() {
532 println!("No matching workspace crates to release.");
533 return Ok(ReleaseOutput {
534 released_packages: vec![],
535 dry_run,
536 });
537 }
538 using_preserved = true;
539 }
540 }
541 }
542
543 let mut final_changesets;
544 let plan_state = if using_preserved {
545 if dry_run {
546 final_changesets = current_changesets;
547 final_changesets.extend(preserved_changesets);
548 } else {
549 restore_prerelease_changesets(&prerelease_dir, &changesets_dir)?;
550 final_changesets = load_changesets(&changesets_dir)?;
551 }
552
553 match compute_plan_state(&final_changesets, &workspace, &config)? {
554 PlanOutcome::Plan(plan) => plan,
555 PlanOutcome::NoApplicablePackages => {
556 println!("No applicable packages found in changesets.");
557 return Ok(ReleaseOutput {
558 released_packages: vec![],
559 dry_run,
560 });
561 }
562 PlanOutcome::NoMatchingCrates => {
563 println!("No matching workspace crates to release.");
564 return Ok(ReleaseOutput {
565 released_packages: vec![],
566 dry_run,
567 });
568 }
569 }
570 } else {
571 final_changesets = current_changesets;
572 match cached_plan_state {
573 Some(plan) => plan,
574 None => match compute_plan_state(&final_changesets, &workspace, &config)? {
575 PlanOutcome::Plan(plan) => plan,
576 PlanOutcome::NoApplicablePackages => {
577 println!("No applicable packages found in changesets.");
578 return Ok(ReleaseOutput {
579 released_packages: vec![],
580 dry_run,
581 });
582 }
583 PlanOutcome::NoMatchingCrates => {
584 println!("No matching workspace crates to release.");
585 return Ok(ReleaseOutput {
586 released_packages: vec![],
587 dry_run,
588 });
589 }
590 },
591 }
592 };
593
594 let PlanState {
595 mut messages_by_pkg,
596 used_paths,
597 releases,
598 released_packages,
599 } = plan_state;
600
601 print_release_plan(&workspace, &releases);
602
603 let is_prerelease_release = releases_include_prerelease(&releases);
604
605 if dry_run {
606 println!("Dry-run: no files modified, no tags created.");
607 return Ok(ReleaseOutput {
608 released_packages,
609 dry_run: true,
610 });
611 }
612
613 apply_releases(
614 &releases,
615 &workspace,
616 &mut messages_by_pkg,
617 &final_changesets,
618 &config,
619 )?;
620
621 finalize_consumed_changesets(used_paths, &workspace.root, is_prerelease_release)?;
622
623 let _ = regenerate_lockfile(&workspace);
629
630 Ok(ReleaseOutput {
631 released_packages,
632 dry_run: false,
633 })
634}
635
636fn compute_plan_state(
637 changesets: &[ChangesetInfo],
638 workspace: &Workspace,
639 config: &Config,
640) -> Result<PlanOutcome> {
641 let (mut bump_by_pkg, messages_by_pkg, used_paths) =
642 compute_initial_bumps(changesets, workspace, config)?;
643
644 if bump_by_pkg.is_empty() {
645 return Ok(PlanOutcome::NoApplicablePackages);
646 }
647
648 let dependents = build_dependency_graph(workspace, config);
649 apply_dependency_cascade(&mut bump_by_pkg, &dependents, config, workspace)?;
650 apply_linked_dependencies(&mut bump_by_pkg, config, workspace)?;
651
652 let releases = prepare_release_plan(&bump_by_pkg, workspace)?;
653 if releases.is_empty() {
654 return Ok(PlanOutcome::NoMatchingCrates);
655 }
656
657 let released_packages: Vec<ReleasedPackage> = releases
658 .iter()
659 .map(|(name, old_version, new_version)| {
660 let bump = bump_by_pkg.get(name).copied().unwrap_or(Bump::Patch);
661 let display_name = workspace
662 .find_by_identifier(name)
663 .map(|info| info.name.clone())
664 .unwrap_or_else(|| name.clone());
665 ReleasedPackage {
666 name: display_name,
667 identifier: name.clone(),
668 old_version: old_version.clone(),
669 new_version: new_version.clone(),
670 bump,
671 }
672 })
673 .collect();
674
675 Ok(PlanOutcome::Plan(PlanState {
676 messages_by_pkg,
677 used_paths,
678 releases,
679 released_packages,
680 }))
681}
682
683fn releases_include_prerelease(releases: &ReleasePlan) -> bool {
684 releases.iter().any(|(_, _, new_version)| {
685 Version::parse(new_version)
686 .map(|v| !v.pre.is_empty())
687 .unwrap_or(false)
688 })
689}
690
691pub(crate) fn restore_prerelease_changesets(
692 prerelease_dir: &Path,
693 changesets_dir: &Path,
694) -> Result<()> {
695 if !prerelease_dir.exists() {
696 return Ok(());
697 }
698
699 for entry in fs::read_dir(prerelease_dir)? {
700 let entry = entry?;
701 let path = entry.path();
702 if !path.is_file() {
703 continue;
704 }
705 if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
706 continue;
707 }
708
709 let _ = move_changeset_file(&path, changesets_dir)?;
711 }
712
713 Ok(())
714}
715
716fn finalize_consumed_changesets(
717 used_paths: BTreeSet<PathBuf>,
718 workspace_root: &Path,
719 preserve_for_prerelease: bool,
720) -> Result<()> {
721 if used_paths.is_empty() {
722 return Ok(());
723 }
724
725 if preserve_for_prerelease {
726 let prerelease_dir = workspace_root.join(".sampo").join("prerelease");
727 for path in used_paths {
728 if !path.exists() {
729 continue;
730 }
731 let _ = move_changeset_file(&path, &prerelease_dir)?;
732 }
733 println!("Preserved consumed changesets for pre-release.");
734 } else {
735 for path in used_paths {
736 if !path.exists() {
737 continue;
738 }
739 fs::remove_file(&path).map_err(|err| SampoError::Io(io_error_with_path(err, &path)))?;
740 }
741 println!("Removed consumed changesets.");
742 }
743
744 Ok(())
745}
746
747pub(crate) fn move_changeset_file(source: &Path, dest_dir: &Path) -> Result<PathBuf> {
748 if !source.exists() {
749 return Ok(source.to_path_buf());
750 }
751
752 fs::create_dir_all(dest_dir)?;
753 let file_name = source
754 .file_name()
755 .ok_or_else(|| SampoError::Changeset("Invalid changeset file name".to_string()))?;
756
757 let mut destination = dest_dir.join(file_name);
758 if destination == source {
759 return Ok(destination);
760 }
761
762 if destination.exists() {
763 destination = unique_destination_path(dest_dir, file_name);
764 }
765
766 fs::rename(source, &destination)?;
767 Ok(destination)
768}
769
/// Produces a path in `dir` derived from `file_name` that does not collide
/// with an existing file, by inserting `-1`, `-2`, … before the extension.
fn unique_destination_path(dir: &Path, file_name: &OsStr) -> PathBuf {
    let name_path = Path::new(file_name);
    let stem = name_path
        .file_stem()
        .unwrap_or(file_name)
        .to_string_lossy()
        .into_owned();
    let extension = name_path
        .extension()
        .map(|e| e.to_string_lossy().into_owned());

    // Try successively larger counters until a free name appears.
    (1u64..)
        .map(|counter| {
            let candidate_name = match &extension {
                Some(ext) => format!("{}-{}.{}", stem, counter, ext),
                None => format!("{}-{}", stem, counter),
            };
            dir.join(candidate_name)
        })
        .find(|candidate| !candidate.exists())
        .expect("counter space exhausted while searching for a unique path")
}
794
795pub(crate) fn regenerate_lockfile(workspace: &Workspace) -> Result<()> {
801 use crate::types::PackageKind;
802 use rustc_hash::FxHashSet;
803
804 let mut ecosystems: FxHashSet<PackageKind> = FxHashSet::default();
806 for pkg in &workspace.members {
807 ecosystems.insert(pkg.kind);
808 }
809
810 let mut errors: Vec<(PackageKind, String)> = Vec::new();
812
813 for kind in ecosystems {
814 let adapter = match kind {
815 PackageKind::Cargo => PackageAdapter::Cargo,
816 PackageKind::Npm => PackageAdapter::Npm,
817 PackageKind::Hex => PackageAdapter::Hex,
818 };
819
820 let lockfile_exists = match kind {
821 PackageKind::Cargo => workspace.root.join("Cargo.lock").exists(),
822 PackageKind::Npm => {
823 workspace.root.join("package-lock.json").exists()
824 || workspace.root.join("pnpm-lock.yaml").exists()
825 || workspace.root.join("yarn.lock").exists()
826 || workspace.root.join("bun.lockb").exists()
827 || workspace.root.join("npm-shrinkwrap.json").exists()
828 }
829 PackageKind::Hex => workspace.root.join("mix.lock").exists(),
830 };
831
832 if lockfile_exists && let Err(e) = adapter.regenerate_lockfile(&workspace.root) {
833 errors.push((kind, e.to_string()));
834 }
835 }
836
837 if !errors.is_empty() {
839 for (kind, err) in errors {
840 eprintln!(
841 "Warning: failed to regenerate {} lockfile: {}",
842 kind.display_name(),
843 err
844 );
845 }
846 }
847
848 Ok(())
849}
850
/// Computes the bump levels and changelog messages requested directly by
/// changesets, before any dependency cascade is applied.
///
/// Returns the bump per canonical package identifier, the (possibly
/// enriched) changeset messages per package, and the set of changeset file
/// paths that contributed at least one non-ignored package — i.e. the files
/// that were actually consumed.
fn compute_initial_bumps(
    changesets: &[ChangesetInfo],
    ws: &Workspace,
    cfg: &Config,
) -> Result<InitialBumpsResult> {
    let mut bump_by_pkg: BTreeMap<String, Bump> = BTreeMap::new();
    let mut messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>> = BTreeMap::new();
    let mut used_paths: BTreeSet<std::path::PathBuf> = BTreeSet::new();

    // GitHub context used to enrich messages with commit/author details.
    // GITHUB_TOKEN takes precedence over GH_TOKEN.
    let repo_slug = detect_github_repo_slug_with_config(&ws.root, cfg.github_repository.as_deref());
    let github_token = std::env::var("GITHUB_TOKEN")
        .ok()
        .or_else(|| std::env::var("GH_TOKEN").ok());

    for cs in changesets {
        let mut consumed_changeset = false;
        for (spec, bump) in &cs.entries {
            let info = resolve_package_spec(ws, spec)?;
            // Entries for ignored packages neither bump nor consume.
            if should_ignore_package(cfg, ws, info)? {
                continue;
            }

            // At least one entry applies, so this changeset file is consumed.
            consumed_changeset = true;

            let identifier = info.canonical_identifier().to_string();

            // Keep the highest bump requested for this package.
            bump_by_pkg
                .entry(identifier.clone())
                .and_modify(|b| {
                    if *bump > *b {
                        *b = *bump;
                    }
                })
                .or_insert(*bump);

            // Enrich the message with commit metadata when the changeset file
            // can be tied to a commit; otherwise use the raw message.
            let commit_hash = get_commit_hash_for_path(&ws.root, &cs.path);
            let enriched = if let Some(hash) = commit_hash {
                enrich_changeset_message(
                    &cs.message,
                    &hash,
                    &ws.root,
                    repo_slug.as_deref(),
                    github_token.as_deref(),
                    cfg.changelog_show_commit_hash,
                    cfg.changelog_show_acknowledgments,
                )
            } else {
                cs.message.clone()
            };

            messages_by_pkg
                .entry(identifier)
                .or_default()
                .push((enriched, *bump));
        }
        if consumed_changeset {
            used_paths.insert(cs.path.clone());
        }
    }

    Ok((bump_by_pkg, messages_by_pkg, used_paths))
}
917
918fn build_dependency_graph(ws: &Workspace, cfg: &Config) -> BTreeMap<String, BTreeSet<String>> {
921 let mut dependents: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
922
923 let ignored_packages: BTreeSet<String> = ws
925 .members
926 .iter()
927 .filter(|c| should_ignore_package(cfg, ws, c).unwrap_or(false))
928 .map(|c| c.canonical_identifier().to_string())
929 .collect();
930
931 for c in &ws.members {
932 let identifier = c.canonical_identifier();
934 if ignored_packages.contains(identifier) {
935 continue;
936 }
937
938 for dep in &c.internal_deps {
939 if ignored_packages.contains(dep) {
941 continue;
942 }
943
944 dependents
945 .entry(dep.clone())
946 .or_default()
947 .insert(identifier.to_string());
948 }
949 }
950 dependents
951}
952
/// Propagates bumps through the workspace using a worklist.
///
/// Dependents of a changed package receive at least a patch bump (or the
/// changed package's full bump when the dependent belongs to any fixed
/// group), and every member of a changed package's fixed group is raised to
/// the same bump level. Newly-added packages are queued so their own
/// dependents cascade as well.
fn apply_dependency_cascade(
    bump_by_pkg: &mut BTreeMap<String, Bump>,
    dependents: &BTreeMap<String, BTreeSet<String>>,
    cfg: &Config,
    ws: &Workspace,
) -> Result<()> {
    let resolved_fixed_groups =
        resolve_config_groups(ws, &cfg.fixed_dependencies, "packages.fixed")?;

    // Index of the fixed group containing `pkg_id`, if any.
    let find_fixed_group = |pkg_id: &str| -> Option<usize> {
        resolved_fixed_groups
            .iter()
            .position(|group| group.contains(&pkg_id.to_string()))
    };

    // Workspace members keyed by canonical identifier, for ignore checks.
    let mut by_id: BTreeMap<String, &PackageInfo> = BTreeMap::new();
    for c in &ws.members {
        by_id.insert(c.canonical_identifier().to_string(), c);
    }

    // Worklist of packages whose bump may affect others; `seen` prevents
    // re-queueing a package that is already pending or was processed.
    let mut queue: Vec<String> = bump_by_pkg.keys().cloned().collect();
    let mut seen: BTreeSet<String> = queue.iter().cloned().collect();

    while let Some(changed) = queue.pop() {
        let changed_bump = bump_by_pkg.get(&changed).copied().unwrap_or(Bump::Patch);

        // 1) Cascade to direct dependents of the changed package.
        if let Some(deps) = dependents.get(&changed) {
            for dep_name in deps {
                if let Some(info) = by_id.get(dep_name) {
                    match should_ignore_package(cfg, ws, info) {
                        Ok(true) => continue,
                        Ok(false) => {}
                        // Ignore-check errors are deliberately swallowed:
                        // the package is treated as not ignored.
                        Err(_) => {}
                    }
                }

                // Fixed-group members follow the full bump; plain dependents
                // only need a patch release for the version-reference change.
                let dependent_bump = if find_fixed_group(dep_name).is_some() {
                    changed_bump
                } else {
                    Bump::Patch
                };

                // Insert or raise, never lower, the dependent's bump.
                let entry = bump_by_pkg
                    .entry(dep_name.clone())
                    .or_insert(dependent_bump);
                if *entry < dependent_bump {
                    *entry = dependent_bump;
                }
                if !seen.contains(dep_name) {
                    queue.push(dep_name.clone());
                    seen.insert(dep_name.clone());
                }
            }
        }

        // 2) Raise every other member of the changed package's fixed group.
        if let Some(group_idx) = find_fixed_group(&changed) {
            for group_member in &resolved_fixed_groups[group_idx] {
                if group_member == &changed {
                    continue;
                }

                if let Some(info) = by_id.get(group_member) {
                    match should_ignore_package(cfg, ws, info) {
                        Ok(true) => continue,
                        Ok(false) => {}
                        // Same deliberate best-effort handling as above.
                        Err(_) => {}
                    }
                }

                // Insert or raise, never lower, the group member's bump.
                let entry = bump_by_pkg
                    .entry(group_member.clone())
                    .or_insert(changed_bump);
                if *entry < changed_bump {
                    *entry = changed_bump;
                }
                if !seen.contains(group_member) {
                    queue.push(group_member.clone());
                    seen.insert(group_member.clone());
                }
            }
        }
    }

    Ok(())
}
1057
1058fn apply_linked_dependencies(
1060 bump_by_pkg: &mut BTreeMap<String, Bump>,
1061 cfg: &Config,
1062 ws: &Workspace,
1063) -> Result<()> {
1064 let resolved_groups = resolve_config_groups(ws, &cfg.linked_dependencies, "packages.linked")?;
1065
1066 for group in &resolved_groups {
1067 let mut group_has_bumps = false;
1069 let mut highest_bump = Bump::Patch;
1070
1071 for group_member in group {
1073 if let Some(&member_bump) = bump_by_pkg.get(group_member) {
1074 group_has_bumps = true;
1075 if member_bump > highest_bump {
1076 highest_bump = member_bump;
1077 }
1078 }
1079 }
1080
1081 if group_has_bumps {
1083 for group_member in group {
1086 if bump_by_pkg.contains_key(group_member) {
1087 let current_bump = bump_by_pkg
1089 .get(group_member)
1090 .copied()
1091 .unwrap_or(Bump::Patch);
1092 if highest_bump > current_bump {
1093 bump_by_pkg.insert(group_member.clone(), highest_bump);
1094 }
1095 }
1096 }
1097 }
1098 }
1099
1100 Ok(())
1101}
1102
1103fn prepare_release_plan(
1105 bump_by_pkg: &BTreeMap<String, Bump>,
1106 ws: &Workspace,
1107) -> Result<ReleasePlan> {
1108 let mut by_id: BTreeMap<String, &PackageInfo> = BTreeMap::new();
1110 for c in &ws.members {
1111 by_id.insert(c.canonical_identifier().to_string(), c);
1112 }
1113
1114 let mut releases: Vec<(String, String, String)> = Vec::new(); for (identifier, bump) in bump_by_pkg {
1116 if let Some(info) = by_id.get(identifier) {
1117 let old = if info.version.is_empty() {
1118 "0.0.0".to_string()
1119 } else {
1120 info.version.clone()
1121 };
1122
1123 let newv = bump_version(&old, *bump).unwrap_or_else(|_| old.clone());
1124
1125 releases.push((identifier.clone(), old, newv));
1126 }
1127 }
1128
1129 Ok(releases)
1130}
1131
1132fn print_release_plan(workspace: &Workspace, releases: &ReleasePlan) {
1134 let include_kind = workspace.has_multiple_package_kinds();
1135 println!("Planned releases:");
1136 for (identifier, old, newv) in releases {
1137 let display = workspace
1138 .find_by_identifier(identifier)
1139 .map(|info| info.display_name(include_kind))
1140 .or_else(|| {
1141 PackageSpecifier::parse(identifier)
1142 .ok()
1143 .map(|spec| spec.display_name(include_kind))
1144 })
1145 .unwrap_or_else(|| identifier.clone());
1146 println!(" {display}: {old} -> {newv}");
1147 }
1148}
1149
/// Timezone preference for rendering the release date in changelogs.
#[derive(Debug, Clone, Copy)]
enum ReleaseDateTimezone {
    // System local timezone (the default).
    Local,
    // Coordinated Universal Time.
    Utc,
    // Fixed UTC offset such as "+02:00".
    Offset(FixedOffset),
    // Named IANA timezone such as "Europe/Paris".
    Named(Tz),
}
1157
/// Parses the `changelog.release_date_timezone` configuration value.
///
/// Accepted forms: empty or "local" (system timezone), "utc"/"z", an IANA
/// timezone name (e.g. `Europe/Paris`), or a fixed offset written as
/// `+HH:MM`, `+HHMM`, or `+HH` (and the `-` equivalents). Anything else
/// produces a `SampoError::Config`.
fn parse_release_date_timezone(spec: &str) -> Result<ReleaseDateTimezone> {
    let trimmed = spec.trim();
    // Shared error constructor for every unrecognized form.
    let invalid_value = || {
        SampoError::Config(format!(
            "Unsupported changelog.release_date_timezone value '{trimmed}'. Use 'UTC', 'local', a fixed offset like '+02:00', or an IANA timezone name such as 'Europe/Paris'."
        ))
    };
    if trimmed.is_empty() {
        return Ok(ReleaseDateTimezone::Local);
    }

    if trimmed.eq_ignore_ascii_case("local") {
        return Ok(ReleaseDateTimezone::Local);
    }

    if trimmed.eq_ignore_ascii_case("utc") || trimmed.eq_ignore_ascii_case("z") {
        return Ok(ReleaseDateTimezone::Utc);
    }

    // Try an IANA timezone name before attempting offset syntax.
    if let Ok(zone) = trimmed.parse::<Tz>() {
        return Ok(ReleaseDateTimezone::Named(zone));
    }

    // Remaining forms are fixed offsets: a sign byte followed by hour and
    // minute digits.
    let bytes = trimmed.as_bytes();
    if bytes.len() < 2 {
        return Err(invalid_value());
    }

    let sign = match bytes[0] as char {
        '+' => 1,
        '-' => -1,
        _ => return Err(invalid_value()),
    };

    // Safe to slice at byte 1: the first byte is ASCII '+' or '-'.
    let remainder = &trimmed[1..];
    if remainder.is_empty() {
        return Err(invalid_value());
    }

    // Split into hour and minute digits for "HH:MM", "HHMM", or bare "HH".
    let (hour_part, minute_part) = if let Some(idx) = remainder.find(':') {
        let (h, m) = remainder.split_at(idx);
        // `m` still starts with ':'; require at least one digit after it.
        if m.len() < 2 {
            return Err(invalid_value());
        }
        (h, &m[1..])
    } else if remainder.len() == 4 {
        (&remainder[..2], &remainder[2..])
    } else if remainder.len() == 2 {
        // Bare hours: minutes default to zero.
        (remainder, "00")
    } else {
        return Err(invalid_value());
    };

    let hours: u32 = hour_part.parse().map_err(|_| invalid_value())?;
    let minutes: u32 = minute_part.parse().map_err(|_| invalid_value())?;

    if hours > 23 || minutes > 59 {
        return Err(SampoError::Config(format!(
            "Unsupported changelog.release_date_timezone value '{trimmed}'. Hours must be <= 23 and minutes <= 59."
        )));
    }

    // chrono convention: east = ahead of UTC ('+'), west = behind ('-').
    let total_seconds = (hours * 3600 + minutes * 60) as i32;
    let offset = if sign >= 0 {
        FixedOffset::east_opt(total_seconds)
    } else {
        FixedOffset::west_opt(total_seconds)
    };

    match offset {
        Some(value) => Ok(ReleaseDateTimezone::Offset(value)),
        None => Err(SampoError::Config(format!(
            "Unsupported changelog.release_date_timezone value '{trimmed}'. Offset is out of range."
        ))),
    }
}
1234
1235fn compute_release_date_display(cfg: &Config) -> Result<Option<String>> {
1236 compute_release_date_display_with_now(cfg, Utc::now())
1237}
1238
1239fn compute_release_date_display_with_now(
1240 cfg: &Config,
1241 now: DateTime<Utc>,
1242) -> Result<Option<String>> {
1243 if !cfg.changelog_show_release_date {
1244 return Ok(None);
1245 }
1246
1247 let format_str = cfg.changelog_release_date_format.trim();
1248 if format_str.is_empty() {
1249 return Ok(None);
1250 }
1251
1252 let timezone_pref = cfg
1253 .changelog_release_date_timezone
1254 .as_deref()
1255 .map(str::trim)
1256 .filter(|s| !s.is_empty())
1257 .map(parse_release_date_timezone)
1258 .transpose()?;
1259
1260 let tz = timezone_pref.unwrap_or(ReleaseDateTimezone::Local);
1261
1262 let formatted = match tz {
1263 ReleaseDateTimezone::Local => now.with_timezone(&Local).format(format_str).to_string(),
1264 ReleaseDateTimezone::Utc => now.format(format_str).to_string(),
1265 ReleaseDateTimezone::Offset(offset) => {
1266 now.with_timezone(&offset).format(format_str).to_string()
1267 }
1268 ReleaseDateTimezone::Named(zone) => now.with_timezone(&zone).format(format_str).to_string(),
1269 };
1270
1271 Ok(Some(formatted))
1272}
1273
/// Write a release plan to disk: bump each released package's manifest and
/// prepend a matching section to its `CHANGELOG.md`.
///
/// `messages_by_pkg` is extended in place with dependency-bump explanation
/// entries before the changelog sections are rendered, so callers observe
/// the augmented map afterwards.
///
/// Errors when a planned package is missing from the workspace, or when
/// manifest/changelog I/O fails.
fn apply_releases(
    releases: &ReleasePlan,
    ws: &Workspace,
    messages_by_pkg: &mut BTreeMap<String, Vec<(String, Bump)>>,
    changesets: &[ChangesetInfo],
    cfg: &Config,
) -> Result<()> {
    // Index workspace members by canonical identifier for quick lookups below.
    let mut by_id: BTreeMap<String, &PackageInfo> = BTreeMap::new();
    for c in &ws.members {
        by_id.insert(c.canonical_identifier().to_string(), c);
    }

    // Cargo manifests need extra workspace metadata; skip loading it entirely
    // when the workspace has no Cargo packages.
    let has_cargo = ws.members.iter().any(|pkg| pkg.kind == PackageKind::Cargo);
    let manifest_metadata = if has_cargo {
        Some(ManifestMetadata::load(ws)?)
    } else {
        None
    };

    // identifier -> (old version, new version), used by the explanation pass.
    let releases_map: BTreeMap<String, (String, String)> = releases
        .iter()
        .map(|(name, old, new)| (name.clone(), (old.clone(), new.clone())))
        .collect();

    // package name -> new version, consumed by the manifest rewriter to update
    // internal dependency requirements.
    let mut new_version_by_name: BTreeMap<String, String> = BTreeMap::new();
    for (identifier, _old, newv) in releases {
        if let Some(info) = by_id.get(identifier) {
            new_version_by_name.insert(info.name.clone(), newv.clone());
        }
    }

    let dependency_explanations =
        detect_all_dependency_explanations(changesets, ws, cfg, &releases_map)?;

    // Merge "bumped because dependency X changed" notes into the changelog
    // message buckets.
    for (pkg_name, explanations) in dependency_explanations {
        messages_by_pkg
            .entry(pkg_name)
            .or_default()
            .extend(explanations);
    }

    // Computed once; the same display date is stamped on every changelog.
    let release_date_display = compute_release_date_display(cfg)?;

    for (name, old, newv) in releases {
        let info = by_id
            .get(name.as_str())
            .ok_or_else(|| SampoError::Release(format!("package '{}' not found", name)))?;
        let adapter = match info.kind {
            PackageKind::Cargo => crate::adapters::PackageAdapter::Cargo,
            PackageKind::Npm => crate::adapters::PackageAdapter::Npm,
            PackageKind::Hex => crate::adapters::PackageAdapter::Hex,
        };
        let manifest_path = adapter.manifest_path(&info.path);
        let text = fs::read_to_string(&manifest_path)?;

        // Only the Cargo adapter consumes the workspace manifest metadata.
        let cargo_metadata = match adapter {
            PackageAdapter::Cargo => manifest_metadata.as_ref(),
            PackageAdapter::Npm | PackageAdapter::Hex => None,
        };
        let (updated, _dep_updates) = adapter.update_manifest_versions(
            &manifest_path,
            &text,
            Some(newv.as_str()),
            &new_version_by_name,
            cargo_metadata,
        )?;
        fs::write(&manifest_path, updated)?;

        let messages = messages_by_pkg.get(name).cloned().unwrap_or_default();
        update_changelog(
            &info.path,
            &info.name,
            old,
            newv,
            &messages,
            release_date_display.as_deref(),
        )?;
    }

    Ok(())
}
1362
/// Expand a loose semver string ("1", "1.2") into a full `major.minor.patch`
/// core, preserving any pre-release (`-…`) or build-metadata (`+…`) suffix
/// verbatim.
///
/// Returns a human-readable error when the input is empty, the core has more
/// than three dot-separated components, or any component is empty.
fn normalize_version_input(input: &str) -> std::result::Result<String, String> {
    let trimmed = input.trim();
    if trimmed.is_empty() {
        return Err("Version string cannot be empty".to_string());
    }

    // Everything from the first '-' or '+' onwards is carried over untouched.
    let boundary = trimmed
        .find(|ch: char| ch == '-' || ch == '+')
        .unwrap_or(trimmed.len());
    let (core, rest) = trimmed.split_at(boundary);

    let components: Vec<&str> = if core.is_empty() {
        Vec::new()
    } else {
        core.split('.').collect()
    };

    if !(1..=3).contains(&components.len()) {
        return Err(format!(
            "Invalid semantic version '{input}': expected one to three numeric components"
        ));
    }

    if components.iter().any(|part| part.is_empty()) {
        return Err(format!(
            "Invalid semantic version '{input}': found empty numeric component"
        ));
    }

    // Pad missing minor/patch components with zeros.
    let mut padded = components;
    while padded.len() < 3 {
        padded.push("0");
    }

    Ok(format!("{}{rest}", padded.join(".")))
}
1402
1403pub(crate) fn parse_version_string(input: &str) -> std::result::Result<Version, String> {
1404 let normalized = normalize_version_input(input)?;
1405 Version::parse(&normalized).map_err(|err| format!("Invalid semantic version '{input}': {err}"))
1406}
1407
1408fn implied_prerelease_bump(version: &Version) -> std::result::Result<Bump, String> {
1409 if version.pre.is_empty() {
1410 return Err("Version does not contain a pre-release identifier".to_string());
1411 }
1412
1413 if version.minor == 0 && version.patch == 0 {
1414 Ok(Bump::Major)
1415 } else if version.patch == 0 {
1416 Ok(Bump::Minor)
1417 } else {
1418 Ok(Bump::Patch)
1419 }
1420}
1421
1422fn increment_prerelease(pre: &Prerelease) -> std::result::Result<Prerelease, String> {
1423 if pre.is_empty() {
1424 return Err("Pre-release identifier missing".to_string());
1425 }
1426
1427 let mut parts: Vec<String> = pre.as_str().split('.').map(|s| s.to_string()).collect();
1428 if parts.is_empty() {
1429 return Err("Pre-release identifier missing".to_string());
1430 }
1431
1432 let last_is_numeric = parts
1433 .last()
1434 .map(|part| part.chars().all(|ch| ch.is_ascii_digit()))
1435 .unwrap_or(false);
1436
1437 if last_is_numeric {
1438 let value = parts
1439 .last()
1440 .unwrap()
1441 .parse::<u64>()
1442 .map_err(|_| "Pre-release component is not a valid number".to_string())?;
1443 let incremented = value
1444 .checked_add(1)
1445 .ok_or_else(|| "Pre-release counter overflow".to_string())?;
1446 *parts.last_mut().unwrap() = incremented.to_string();
1447 } else {
1448 parts.push("1".to_string());
1449 }
1450
1451 let candidate = parts.join(".");
1452 Prerelease::new(&candidate).map_err(|err| format!("Invalid pre-release '{candidate}': {err}"))
1453}
1454
1455fn strip_trailing_numeric_identifiers(pre: &Prerelease) -> Option<Prerelease> {
1456 if pre.is_empty() {
1457 return None;
1458 }
1459
1460 let mut parts: Vec<&str> = pre.as_str().split('.').collect();
1461 while let Some(last) = parts.last() {
1462 if last.chars().all(|ch| ch.is_ascii_digit()) {
1463 parts.pop();
1464 } else {
1465 break;
1466 }
1467 }
1468
1469 if parts.is_empty() {
1470 None
1471 } else {
1472 let candidate = parts.join(".");
1473 Prerelease::new(&candidate).ok()
1474 }
1475}
1476
1477fn apply_base_bump(version: &mut Version, bump: Bump) -> std::result::Result<(), String> {
1478 match bump {
1479 Bump::Patch => {
1480 version.patch = version
1481 .patch
1482 .checked_add(1)
1483 .ok_or_else(|| "Patch component overflow".to_string())?;
1484 }
1485 Bump::Minor => {
1486 version.minor = version
1487 .minor
1488 .checked_add(1)
1489 .ok_or_else(|| "Minor component overflow".to_string())?;
1490 version.patch = 0;
1491 }
1492 Bump::Major => {
1493 version.major = version
1494 .major
1495 .checked_add(1)
1496 .ok_or_else(|| "Major component overflow".to_string())?;
1497 version.minor = 0;
1498 version.patch = 0;
1499 }
1500 }
1501 version.pre = Prerelease::EMPTY;
1502 version.build = BuildMetadata::EMPTY;
1503 Ok(())
1504}
1505
1506pub fn bump_version(old: &str, bump: Bump) -> std::result::Result<String, String> {
1508 let mut version = parse_version_string(old)?;
1509 let original_pre = version.pre.clone();
1510
1511 if original_pre.is_empty() {
1512 apply_base_bump(&mut version, bump)?;
1513 return Ok(version.to_string());
1514 }
1515
1516 let implied = implied_prerelease_bump(&version)?;
1517
1518 if bump <= implied {
1519 version.pre = increment_prerelease(&original_pre)?;
1520 version.build = BuildMetadata::EMPTY;
1521 Ok(version.to_string())
1522 } else {
1523 let base_pre = strip_trailing_numeric_identifiers(&original_pre).ok_or_else(|| {
1524 format!(
1525 "Pre-release version '{old}' must include a non-numeric identifier before the counter"
1526 )
1527 })?;
1528
1529 apply_base_bump(&mut version, bump)?;
1530 version.pre = base_pre;
1531 Ok(version.to_string())
1532 }
1533}
1534
/// Split a changelog body into its intro (everything before the first
/// version heading) and the remainder starting at the first line that begins
/// with `## `.
///
/// When no such heading exists, the whole body is the intro and the second
/// half is empty.
fn split_intro_and_versions(body: &str) -> (&str, &str) {
    // Byte offsets of every line start: 0 plus one past each newline.
    let line_starts = std::iter::once(0).chain(body.match_indices('\n').map(|(idx, _)| idx + 1));

    for start in line_starts {
        if body[start..].starts_with("## ") {
            return body.split_at(start);
        }
    }

    (body, "")
}
1553
/// Decide whether a changelog heading text refers to `version`.
///
/// Accepts an exact match, or the version followed by a separator suffix
/// (em dash or hyphen, e.g. a release date). Rejects headings where the
/// version is merely a prefix of a longer version (`1.0.1` vs `1.0.10`).
fn header_matches_release_version(header_text: &str, version: &str) -> bool {
    if header_text == version {
        return true;
    }

    match header_text.strip_prefix(version) {
        Some(rest) => {
            let suffix = rest.trim_start();
            suffix.is_empty() || suffix.starts_with('—') || suffix.starts_with('-')
        }
        None => false,
    }
}
1567
/// Prepend a release section for `new_version` to the package's
/// `CHANGELOG.md`, creating the file (with a default `# <package>` intro)
/// when it is missing or has no intro.
///
/// If the top-most existing section is an unpublished draft — its heading
/// does not match `old_version` — its bullet points are merged into the new
/// section and the draft is removed, so re-running a release does not leave
/// stale duplicates.
///
/// `release_date_display`, when non-blank, is appended to the heading after
/// an em dash.
fn update_changelog(
    crate_dir: &Path,
    package: &str,
    old_version: &str,
    new_version: &str,
    entries: &[(String, Bump)],
    release_date_display: Option<&str>,
) -> Result<()> {
    let path = crate_dir.join("CHANGELOG.md");
    let existing = if path.exists() {
        fs::read_to_string(&path)?
    } else {
        String::new()
    };
    // Strip a UTF-8 BOM so heading detection works on the very first line.
    let cleaned = existing.trim_start_matches('\u{feff}');
    let (intro_part, versions_part) = split_intro_and_versions(cleaned);
    let mut intro = intro_part.to_string();
    let mut versions_body = versions_part.to_string();

    if intro.trim().is_empty() {
        intro = format!("# {}\n\n", package);
    }

    // Messages for the new section, bucketed by bump level.
    let mut merged_major: Vec<String> = Vec::new();
    let mut merged_minor: Vec<String> = Vec::new();
    let mut merged_patch: Vec<String> = Vec::new();

    // Append, skipping exact duplicates already in the bucket.
    let push_unique = |list: &mut Vec<String>, msg: &str| {
        if !list.iter().any(|m| m == msg) {
            list.push(msg.to_string());
        }
    };

    for (msg, bump) in entries {
        match bump {
            Bump::Major => push_unique(&mut merged_major, msg),
            Bump::Minor => push_unique(&mut merged_minor, msg),
            Bump::Patch => push_unique(&mut merged_patch, msg),
        }
    }

    // If the top-most section was never published (its heading does not match
    // old_version), absorb its bullets into the new section and drop it.
    let trimmed = versions_body.trim_start();
    if trimmed.starts_with("## ") {
        let mut lines_iter = trimmed.lines();
        let header_line = lines_iter.next().unwrap_or("").trim();
        let header_text = header_line.trim_start_matches("## ").trim();

        let is_published_top = header_matches_release_version(header_text, old_version);

        if !is_published_top {
            let after_header_offset = header_line.len();
            let rest_after_header = &trimmed[after_header_offset..];
            // Start of the next "## " heading, relative to rest_after_header.
            let next_rel = rest_after_header.find("\n## ");
            let (section_text, remaining) = match next_rel {
                Some(pos) => {
                    // end = one past the newline, i.e. the start of the next heading line.
                    let end = after_header_offset + pos + 1; (&trimmed[..end], &trimmed[end..])
                }
                None => (trimmed, ""),
            };

            // Walk the draft section, tracking which "### ..." bucket each
            // "- " bullet belongs to.
            let mut current = None::<&str>;
            for line in section_text.lines() {
                let t = line.trim();
                if t.eq_ignore_ascii_case("### Major changes") {
                    current = Some("major");
                    continue;
                } else if t.eq_ignore_ascii_case("### Minor changes") {
                    current = Some("minor");
                    continue;
                } else if t.eq_ignore_ascii_case("### Patch changes") {
                    current = Some("patch");
                    continue;
                }
                if t.starts_with("- ") {
                    let msg = t.trim_start_matches("- ").trim();
                    match current {
                        Some("major") => push_unique(&mut merged_major, msg),
                        Some("minor") => push_unique(&mut merged_minor, msg),
                        Some("patch") => push_unique(&mut merged_patch, msg),
                        _ => {}
                    }
                }
            }

            // Keep only the already-published sections.
            versions_body = remaining.to_string();
        }
    }

    // Render the new section; a blank date suppresses the em-dash suffix.
    let mut section = String::new();
    match release_date_display.and_then(|d| (!d.trim().is_empty()).then_some(d)) {
        Some(date) => section.push_str(&format!("## {new_version} — {date}\n\n")),
        None => section.push_str(&format!("## {new_version}\n\n")),
    }

    if !merged_major.is_empty() {
        section.push_str("### Major changes\n\n");
        for msg in &merged_major {
            section.push_str(&crate::markdown::format_markdown_list_item(msg));
        }
        section.push('\n');
    }
    if !merged_minor.is_empty() {
        section.push_str("### Minor changes\n\n");
        for msg in &merged_minor {
            section.push_str(&crate::markdown::format_markdown_list_item(msg));
        }
        section.push('\n');
    }
    if !merged_patch.is_empty() {
        section.push_str("### Patch changes\n\n");
        for msg in &merged_patch {
            section.push_str(&crate::markdown::format_markdown_list_item(msg));
        }
        section.push('\n');
    }

    // Reassemble: intro, blank line, new section, then older sections.
    let mut combined = String::new();
    combined.push_str(&intro);

    if !combined.is_empty() && !combined.ends_with("\n\n") {
        if combined.ends_with('\n') {
            combined.push('\n');
        } else {
            combined.push_str("\n\n");
        }
    }

    combined.push_str(&section);

    if !versions_body.trim().is_empty() {
        if !combined.ends_with("\n\n") {
            if combined.ends_with('\n') {
                combined.push('\n');
            } else {
                combined.push_str("\n\n");
            }
        }
        combined.push_str(&versions_body);
    }

    fs::write(&path, combined)?;
    Ok(())
}
1724
1725fn validate_fixed_dependencies(config: &Config, workspace: &Workspace) -> Result<()> {
1727 resolve_config_groups(workspace, &config.fixed_dependencies, "packages.fixed")?;
1728 Ok(())
1729}
1730
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::TimeZone;
    use std::collections::BTreeMap;

    // The new section must be inserted after the existing intro, before the
    // previously published sections.
    #[test]
    fn preserves_changelog_intro_when_updating() {
        use std::fs;
        use tempfile::tempdir;

        let temp = tempdir().unwrap();
        let crate_dir = temp.path();
        let intro = "# Custom Changelog Header\n\nIntro text before versions.\n\n";
        let existing = format!(
            "{}## 1.0.0 — 2024-06-19\n\n### Patch changes\n\n- Existing entry\n",
            intro
        );
        fs::write(crate_dir.join("CHANGELOG.md"), existing).unwrap();

        let entries = vec![("Add new feature".to_string(), Bump::Minor)];
        update_changelog(
            crate_dir,
            "my-package",
            "1.0.0",
            "1.0.1",
            &entries,
            Some("2024-06-20"),
        )
        .unwrap();

        let updated = fs::read_to_string(crate_dir.join("CHANGELOG.md")).unwrap();
        assert!(updated.starts_with(intro));

        // New section appears after the intro and before the old one.
        let new_idx = updated.find("## 1.0.1").unwrap();
        let old_idx = updated.find("## 1.0.0").unwrap();
        assert!(new_idx >= intro.len());
        assert!(new_idx < old_idx);
        assert!(updated.contains("## 1.0.1 — 2024-06-20"));
        assert!(updated.contains("- Add new feature"));
        assert!(updated.contains("- Existing entry"));
    }

    // With no existing CHANGELOG.md, a "# <package>" intro is synthesized.
    #[test]
    fn creates_default_header_when_missing_intro() {
        use std::fs;
        use tempfile::tempdir;

        let temp = tempdir().unwrap();
        let crate_dir = temp.path();

        let entries = vec![("Initial release".to_string(), Bump::Major)];
        update_changelog(crate_dir, "new-package", "0.1.0", "1.0.0", &entries, None).unwrap();

        let updated = fs::read_to_string(crate_dir.join("CHANGELOG.md")).unwrap();
        assert!(updated.starts_with("# new-package\n\n## 1.0.0"));
    }

    // Heading comparison tolerates date suffixes but not longer versions.
    #[test]
    fn header_matches_release_version_handles_suffixes() {
        assert!(header_matches_release_version("1.0.0", "1.0.0"));
        assert!(header_matches_release_version(
            "1.0.0 — 2024-06-20",
            "1.0.0"
        ));
        assert!(header_matches_release_version("1.0.0-2024-06-20", "1.0.0"));
        assert!(!header_matches_release_version(
            "1.0.1 — 2024-06-20",
            "1.0.0"
        ));
    }

    // A whitespace-only date must not produce a dangling em-dash suffix.
    #[test]
    fn update_changelog_skips_blank_release_date() {
        use std::fs;
        use tempfile::tempdir;

        let temp = tempdir().unwrap();
        let crate_dir = temp.path();
        let entries = vec![("Bug fix".to_string(), Bump::Patch)];

        update_changelog(
            crate_dir,
            "blank-date",
            "0.1.0",
            "0.1.1",
            &entries,
            Some("   "),
        )
        .unwrap();

        let updated = fs::read_to_string(crate_dir.join("CHANGELOG.md")).unwrap();
        assert!(updated.contains("## 0.1.1\n"));
        assert!(!updated.contains("—"));
    }

    #[test]
    fn parse_release_date_timezone_accepts_utc() {
        match parse_release_date_timezone("UTC").unwrap() {
            ReleaseDateTimezone::Utc => {}
            _ => panic!("Expected UTC timezone"),
        }
    }

    // "+05:45" (Nepal) exercises a non-whole-hour positive offset.
    #[test]
    fn parse_release_date_timezone_accepts_offset() {
        match parse_release_date_timezone("+05:45").unwrap() {
            ReleaseDateTimezone::Offset(offset) => {
                assert_eq!(offset.local_minus_utc(), 5 * 3600 + 45 * 60);
            }
            _ => panic!("Expected fixed offset"),
        }
    }

    #[test]
    fn parse_release_date_timezone_rejects_invalid() {
        let err = parse_release_date_timezone("Not/AZone").unwrap_err();
        let msg = err.to_string();
        assert!(msg.contains("release_date_timezone"));
    }

    // "%Z" renders the timezone abbreviation, proving UTC was applied.
    #[test]
    fn compute_release_date_display_uses_utc() {
        let cfg = Config {
            changelog_release_date_format: "%Z".to_string(),
            changelog_release_date_timezone: Some("UTC".to_string()),
            ..Default::default()
        };

        let now = Utc.with_ymd_and_hms(2024, 1, 15, 12, 0, 0).unwrap();
        let display = compute_release_date_display_with_now(&cfg, now)
            .unwrap()
            .unwrap();
        assert_eq!(display, "UTC");
    }

    #[test]
    fn parse_release_date_timezone_accepts_named_zone() {
        match parse_release_date_timezone("Europe/Paris").unwrap() {
            ReleaseDateTimezone::Named(zone) => {
                assert_eq!(zone, chrono_tz::Europe::Paris);
            }
            _ => panic!("Expected named timezone"),
        }
    }

    // "%z" renders the numeric offset, proving the -03:30 offset was applied.
    #[test]
    fn compute_release_date_display_uses_offset() {
        let cfg = Config {
            changelog_release_date_format: "%z".to_string(),
            changelog_release_date_timezone: Some("-03:30".to_string()),
            ..Default::default()
        };

        let now = Utc.with_ymd_and_hms(2024, 6, 1, 12, 0, 0).unwrap();
        let display = compute_release_date_display_with_now(&cfg, now)
            .unwrap()
            .unwrap();
        assert_eq!(display, "-0330");
    }

    // January 15 is outside DST, so New York renders as EST.
    #[test]
    fn compute_release_date_display_uses_named_zone() {
        let cfg = Config {
            changelog_release_date_format: "%Z".to_string(),
            changelog_release_date_timezone: Some("America/New_York".to_string()),
            ..Default::default()
        };

        let now = Utc.with_ymd_and_hms(2024, 1, 15, 12, 0, 0).unwrap();
        let display = compute_release_date_display_with_now(&cfg, now)
            .unwrap()
            .unwrap();
        assert_eq!(display, "EST");
    }

    // Dependents matching the ignore globs must not be cascade-bumped.
    #[test]
    fn test_ignore_packages_in_dependency_cascade() {
        use crate::types::{PackageInfo, PackageKind, Workspace};
        use std::path::PathBuf;

        let root = PathBuf::from("/tmp/test");
        let workspace = Workspace {
            root: root.clone(),
            members: vec![
                PackageInfo {
                    name: "main-package".to_string(),
                    identifier: "cargo/main-package".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("main-package"),
                    internal_deps: BTreeSet::new(),
                    kind: PackageKind::Cargo,
                },
                PackageInfo {
                    name: "examples-package".to_string(),
                    identifier: "cargo/examples-package".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("examples/package"),
                    internal_deps: BTreeSet::new(),
                    kind: PackageKind::Cargo,
                },
                PackageInfo {
                    name: "benchmarks-utils".to_string(),
                    identifier: "cargo/benchmarks-utils".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("benchmarks/utils"),
                    internal_deps: BTreeSet::new(),
                    kind: PackageKind::Cargo,
                },
            ],
        };

        let config = Config {
            ignore: vec!["examples/*".to_string(), "benchmarks/*".to_string()],
            ..Default::default()
        };

        // main-package has two dependents, both located under ignored paths.
        let mut dependents = BTreeMap::new();
        dependents.insert(
            "cargo/main-package".to_string(),
            ["cargo/examples-package", "cargo/benchmarks-utils"]
                .iter()
                .map(|s| s.to_string())
                .collect(),
        );

        let mut bump_by_pkg = BTreeMap::new();
        bump_by_pkg.insert("cargo/main-package".to_string(), Bump::Minor);

        apply_dependency_cascade(&mut bump_by_pkg, &dependents, &config, &workspace).unwrap();

        // Only the explicitly bumped package remains; ignored dependents are skipped.
        assert_eq!(bump_by_pkg.len(), 1);
        assert!(bump_by_pkg.contains_key("cargo/main-package"));
        assert!(!bump_by_pkg.contains_key("cargo/examples-package"));
        assert!(!bump_by_pkg.contains_key("cargo/benchmarks-utils"));
    }

    // Ignored packages must not appear anywhere in the dependency graph.
    #[test]
    fn test_ignored_packages_excluded_from_dependency_graph() {
        use crate::types::{PackageInfo, PackageKind, Workspace};
        use std::collections::BTreeSet;
        use std::path::PathBuf;

        let root = PathBuf::from("/tmp/test");
        let workspace = Workspace {
            root: root.clone(),
            members: vec![
                PackageInfo {
                    name: "main-package".to_string(),
                    identifier: "cargo/main-package".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("main-package"),
                    internal_deps: ["cargo/examples-package".to_string()].into_iter().collect(),
                    kind: PackageKind::Cargo,
                },
                PackageInfo {
                    name: "examples-package".to_string(),
                    identifier: "cargo/examples-package".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("examples/package"),
                    internal_deps: BTreeSet::new(),
                    kind: PackageKind::Cargo,
                },
            ],
        };

        let config = Config {
            ignore: vec!["examples/*".to_string()],
            ..Default::default()
        };

        let dependents = build_dependency_graph(&workspace, &config);

        // The ignored package is neither a key...
        assert!(!dependents.contains_key("cargo/examples-package"));

        // ...nor referenced as a dependency edge, leaving the graph empty.
        assert!(dependents.is_empty());
    }
}