sampo_core/
release.rs

1use crate::errors::{Result, SampoError, io_error_with_path};
2use crate::filters::should_ignore_crate;
3use crate::manifest::{ManifestMetadata, update_manifest_versions};
4use crate::types::{Bump, CrateInfo, DependencyUpdate, ReleaseOutput, ReleasedPackage, Workspace};
5use crate::{
6    changeset::ChangesetInfo, config::Config, current_branch, detect_github_repo_slug_with_config,
7    discover_workspace, enrich_changeset_message, get_commit_hash_for_path, load_changesets,
8};
9use chrono::{DateTime, FixedOffset, Local, Utc};
10use chrono_tz::Tz;
11use rustc_hash::FxHashSet;
12use semver::{BuildMetadata, Prerelease, Version};
13use std::collections::{BTreeMap, BTreeSet};
14use std::ffi::OsStr;
15use std::fs;
16use std::path::{Path, PathBuf};
17use std::process::Command;
18
19/// Format dependency updates for changelog display
20///
21/// Creates a message in the style of Changesets for dependency updates,
22/// e.g., "Updated dependencies [hash]: pkg1@1.2.0, pkg2@2.0.0"
23pub fn format_dependency_updates_message(updates: &[DependencyUpdate]) -> Option<String> {
24    if updates.is_empty() {
25        return None;
26    }
27
28    let dep_list = updates
29        .iter()
30        .map(|dep| format!("{}@{}", dep.name, dep.new_version))
31        .collect::<Vec<_>>()
32        .join(", ");
33
34    Some(format!("Updated dependencies: {}", dep_list))
35}
36
37/// Convert a list of (name, version) tuples into DependencyUpdate structs
38pub fn build_dependency_updates(updates: &[(String, String)]) -> Vec<DependencyUpdate> {
39    updates
40        .iter()
41        .map(|(name, version)| DependencyUpdate {
42            name: name.clone(),
43            new_version: version.clone(),
44        })
45        .collect()
46}
47
48/// Create a changelog entry for dependency updates
49///
50/// Returns a tuple of (message, bump_type) suitable for adding to changelog messages
51pub fn create_dependency_update_entry(updates: &[DependencyUpdate]) -> Option<(String, Bump)> {
52    format_dependency_updates_message(updates).map(|msg| (msg, Bump::Patch))
53}
54
55/// Create a changelog entry for fixed dependency group policy
56///
57/// Returns a tuple of (message, bump_type) suitable for adding to changelog messages
58pub fn create_fixed_dependency_policy_entry(bump: Bump) -> (String, Bump) {
59    (
60        "Bumped due to fixed dependency group policy".to_string(),
61        bump,
62    )
63}
64
65/// Infer bump type from version changes
66///
67/// This helper function determines the semantic version bump type based on
68/// the difference between old and new version strings.
69pub fn infer_bump_from_versions(old_ver: &str, new_ver: &str) -> Bump {
70    let old_parts: Vec<u32> = old_ver.split('.').filter_map(|s| s.parse().ok()).collect();
71    let new_parts: Vec<u32> = new_ver.split('.').filter_map(|s| s.parse().ok()).collect();
72
73    if old_parts.len() >= 3 && new_parts.len() >= 3 {
74        if new_parts[0] > old_parts[0] {
75            Bump::Major
76        } else if new_parts[1] > old_parts[1] {
77            Bump::Minor
78        } else {
79            Bump::Patch
80        }
81    } else {
82        Bump::Patch
83    }
84}
85
86/// Detect all dependency-related explanations for package releases
87///
88/// This function is the unified entry point for detecting all types of automatic
89/// dependency-related changelog entries. It identifies:
90/// - Packages bumped due to internal dependency updates ("Updated dependencies: ...")
91/// - Packages bumped due to fixed dependency group policy ("Bumped due to fixed dependency group policy")
92///
93/// # Arguments
94/// * `changesets` - The changesets being processed
95/// * `workspace` - The workspace containing all packages
96/// * `config` - The configuration with dependency policies
97/// * `releases` - Map of package name to (old_version, new_version) for all planned releases
98///
99/// # Returns
100/// A map of package name to list of (message, bump_type) explanations to add to changelogs
101pub fn detect_all_dependency_explanations(
102    changesets: &[ChangesetInfo],
103    workspace: &Workspace,
104    config: &Config,
105    releases: &BTreeMap<String, (String, String)>,
106) -> BTreeMap<String, Vec<(String, Bump)>> {
107    let mut messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>> = BTreeMap::new();
108
109    // 1. Detect packages bumped due to fixed dependency group policy
110    let bumped_packages: BTreeSet<String> = releases.keys().cloned().collect();
111    let policy_packages =
112        detect_fixed_dependency_policy_packages(changesets, workspace, config, &bumped_packages);
113
114    for (pkg_name, policy_bump) in policy_packages {
115        // For accurate bump detection, infer from actual version changes
116        let actual_bump = if let Some((old_ver, new_ver)) = releases.get(&pkg_name) {
117            infer_bump_from_versions(old_ver, new_ver)
118        } else {
119            policy_bump
120        };
121
122        let (msg, bump_type) = create_fixed_dependency_policy_entry(actual_bump);
123        messages_by_pkg
124            .entry(pkg_name)
125            .or_default()
126            .push((msg, bump_type));
127    }
128
129    // 2. Detect packages bumped due to internal dependency updates
130    // Note: Even packages with explicit changesets can have dependency updates
131
132    // Build new version lookup from releases
133    let new_version_by_name: BTreeMap<String, String> = releases
134        .iter()
135        .map(|(name, (_old, new_ver))| (name.clone(), new_ver.clone()))
136        .collect();
137
138    // Build map of crate name -> CrateInfo for quick lookup (only non-ignored packages)
139    let by_name: BTreeMap<String, &CrateInfo> = workspace
140        .members
141        .iter()
142        .filter(|c| !should_ignore_crate(config, workspace, c).unwrap_or(false))
143        .map(|c| (c.name.clone(), c))
144        .collect();
145
146    // For each released crate, check if it has internal dependencies that were updated
147    for crate_name in releases.keys() {
148        if let Some(crate_info) = by_name.get(crate_name) {
149            // Find which internal dependencies were updated
150            let mut updated_deps = Vec::new();
151            for dep_name in &crate_info.internal_deps {
152                if let Some(new_version) = new_version_by_name.get(dep_name as &str) {
153                    // This internal dependency was updated
154                    updated_deps.push((dep_name.clone(), new_version.clone()));
155                }
156            }
157
158            if !updated_deps.is_empty() {
159                // Create dependency update entry
160                let updates = build_dependency_updates(&updated_deps);
161                if let Some((msg, bump)) = create_dependency_update_entry(&updates) {
162                    messages_by_pkg
163                        .entry(crate_name.clone())
164                        .or_default()
165                        .push((msg, bump));
166                }
167            }
168        }
169    }
170
171    messages_by_pkg
172}
173
/// Detect packages that need fixed dependency group policy messages
///
/// This function identifies packages that were bumped solely due to fixed dependency
/// group policies (not due to direct changesets or normal dependency cascades).
/// Returns a map of package name to the bump level they received.
pub fn detect_fixed_dependency_policy_packages(
    changesets: &[ChangesetInfo],
    workspace: &Workspace,
    config: &Config,
    bumped_packages: &BTreeSet<String>,
) -> BTreeMap<String, Bump> {
    // Build set of packages with direct changesets
    let packages_with_changesets: BTreeSet<String> = changesets
        .iter()
        .flat_map(|cs| cs.entries.iter().map(|(name, _)| name.clone()))
        .collect();

    // Build reverse dependency graph (dependency -> set of dependents),
    // skipping ignored packages as dependents.
    // NOTE(review): unlike `build_dependency_graph`, edges whose *dependency*
    // is an ignored package are kept here — confirm the asymmetry is intended.
    let mut dependents: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
    for crate_info in &workspace.members {
        // Skip ignored packages when building the dependency graph
        if should_ignore_crate(config, workspace, crate_info).unwrap_or(false) {
            continue;
        }

        for dep_name in &crate_info.internal_deps {
            dependents
                .entry(dep_name.clone())
                .or_default()
                .insert(crate_info.name.clone());
        }
    }

    // Find packages affected by the normal dependency cascade: everything
    // transitively reachable (via the dependents graph) from a package that
    // has a direct changeset.
    let mut packages_affected_by_cascade = BTreeSet::new();
    for pkg_with_changeset in &packages_with_changesets {
        let mut queue = vec![pkg_with_changeset.clone()];
        let mut visited = BTreeSet::new();

        while let Some(pkg) = queue.pop() {
            if visited.contains(&pkg) {
                continue;
            }
            visited.insert(pkg.clone());

            if let Some(deps) = dependents.get(&pkg) {
                for dep in deps {
                    packages_affected_by_cascade.insert(dep.clone());
                    queue.push(dep.clone());
                }
            }
        }
    }

    // Find packages that need fixed dependency policy messages: bumped
    // packages with neither a direct changeset nor a cascade reason.
    let mut result = BTreeMap::new();

    for pkg_name in bumped_packages {
        // Skip if package has direct changeset
        if packages_with_changesets.contains(pkg_name) {
            continue;
        }

        // Skip if package is affected by normal dependency cascade
        if packages_affected_by_cascade.contains(pkg_name) {
            continue;
        }

        // Check if this package is in a fixed dependency group with an affected package
        for group in &config.fixed_dependencies {
            if group.contains(&pkg_name.to_string()) {
                // Check if any other package in this group has changes
                let has_affected_group_member = group.iter().any(|group_member| {
                    group_member != pkg_name
                        && (packages_with_changesets.contains(group_member)
                            || packages_affected_by_cascade.contains(group_member))
                });

                if has_affected_group_member {
                    // Find the highest bump level in the group to determine the policy bump.
                    // Only members with direct changesets contribute a level;
                    // cascade-only members fall through to the Patch default.
                    let group_bump = group
                        .iter()
                        .filter_map(|member| {
                            if packages_with_changesets.contains(member) {
                                // Find the highest bump from changesets affecting this member
                                changesets
                                    .iter()
                                    .filter_map(|cs| {
                                        cs.entries
                                            .iter()
                                            .find(|(name, _)| name == member)
                                            .map(|(_, b)| *b)
                                    })
                                    .max()
                            } else {
                                None
                            }
                        })
                        .max()
                        .unwrap_or(Bump::Patch);

                    result.insert(pkg_name.clone(), group_bump);
                    // A package belongs to at most one effective group here;
                    // stop at the first group that explains the bump.
                    break;
                }
            }
        }
    }

    result
}
284
/// Type alias for initial bumps computation result
///
/// Tuple produced by `compute_initial_bumps`.
type InitialBumpsResult = (
    BTreeMap<String, Bump>,                // bump_by_pkg: highest bump requested per package
    BTreeMap<String, Vec<(String, Bump)>>, // messages_by_pkg: changelog (message, bump) entries per package
    BTreeSet<std::path::PathBuf>,          // used_paths: changeset files consumed by the plan
);

/// Type alias for release plan
type ReleasePlan = Vec<(String, String, String)>; // (name, old_version, new_version)

/// Aggregated data required to apply a planned release
struct PlanState {
    // Changelog entries per package, keyed by package name.
    messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>>,
    // Changeset files consumed to build this plan.
    used_paths: BTreeSet<PathBuf>,
    // Planned (name, old_version, new_version) triples.
    releases: ReleasePlan,
    // The same plan expressed as structured `ReleasedPackage` values.
    released_packages: Vec<ReleasedPackage>,
}

/// Possible outcomes when computing a release plan from a set of changesets
enum PlanOutcome {
    // No changeset entry applied to a (non-ignored) workspace package.
    NoApplicablePackages,
    // Bumps were computed but none matched a workspace crate.
    NoMatchingCrates,
    // A usable plan was produced.
    Plan(PlanState),
}
309
310/// Main release function that can be called from CLI or other interfaces
311pub fn run_release(root: &std::path::Path, dry_run: bool) -> Result<ReleaseOutput> {
312    let workspace = discover_workspace(root)?;
313    let config = Config::load(&workspace.root)?;
314
315    let branch = current_branch()?;
316    if !config.is_release_branch(&branch) {
317        return Err(SampoError::Release(format!(
318            "Branch '{}' is not configured for releases (allowed: {:?})",
319            branch,
320            config.release_branches().into_iter().collect::<Vec<_>>()
321        )));
322    }
323
324    // Validate fixed dependencies configuration
325    validate_fixed_dependencies(&config, &workspace)?;
326
327    let changesets_dir = workspace.root.join(".sampo").join("changesets");
328    let prerelease_dir = workspace.root.join(".sampo").join("prerelease");
329
330    let current_changesets = load_changesets(&changesets_dir)?;
331    let preserved_changesets = load_changesets(&prerelease_dir)?;
332
333    let mut using_preserved = false;
334    let mut cached_plan_state: Option<PlanState> = None;
335
336    if current_changesets.is_empty() {
337        if preserved_changesets.is_empty() {
338            println!(
339                "No changesets found in {}",
340                workspace.root.join(".sampo").join("changesets").display()
341            );
342            return Ok(ReleaseOutput {
343                released_packages: vec![],
344                dry_run,
345            });
346        }
347        using_preserved = true;
348    } else {
349        match compute_plan_state(&current_changesets, &workspace, &config)? {
350            PlanOutcome::Plan(plan) => {
351                let is_prerelease_preview = releases_include_prerelease(&plan.releases);
352                if !is_prerelease_preview && !preserved_changesets.is_empty() {
353                    using_preserved = true;
354                } else {
355                    cached_plan_state = Some(plan);
356                }
357            }
358            PlanOutcome::NoApplicablePackages => {
359                if preserved_changesets.is_empty() {
360                    println!("No applicable packages found in changesets.");
361                    return Ok(ReleaseOutput {
362                        released_packages: vec![],
363                        dry_run,
364                    });
365                }
366                using_preserved = true;
367            }
368            PlanOutcome::NoMatchingCrates => {
369                if preserved_changesets.is_empty() {
370                    println!("No matching workspace crates to release.");
371                    return Ok(ReleaseOutput {
372                        released_packages: vec![],
373                        dry_run,
374                    });
375                }
376                using_preserved = true;
377            }
378        }
379    }
380
381    let mut final_changesets;
382    let plan_state = if using_preserved {
383        if dry_run {
384            final_changesets = current_changesets;
385            final_changesets.extend(preserved_changesets);
386        } else {
387            restore_prerelease_changesets(&prerelease_dir, &changesets_dir)?;
388            final_changesets = load_changesets(&changesets_dir)?;
389        }
390
391        match compute_plan_state(&final_changesets, &workspace, &config)? {
392            PlanOutcome::Plan(plan) => plan,
393            PlanOutcome::NoApplicablePackages => {
394                println!("No applicable packages found in changesets.");
395                return Ok(ReleaseOutput {
396                    released_packages: vec![],
397                    dry_run,
398                });
399            }
400            PlanOutcome::NoMatchingCrates => {
401                println!("No matching workspace crates to release.");
402                return Ok(ReleaseOutput {
403                    released_packages: vec![],
404                    dry_run,
405                });
406            }
407        }
408    } else {
409        final_changesets = current_changesets;
410        match cached_plan_state {
411            Some(plan) => plan,
412            None => match compute_plan_state(&final_changesets, &workspace, &config)? {
413                PlanOutcome::Plan(plan) => plan,
414                PlanOutcome::NoApplicablePackages => {
415                    println!("No applicable packages found in changesets.");
416                    return Ok(ReleaseOutput {
417                        released_packages: vec![],
418                        dry_run,
419                    });
420                }
421                PlanOutcome::NoMatchingCrates => {
422                    println!("No matching workspace crates to release.");
423                    return Ok(ReleaseOutput {
424                        released_packages: vec![],
425                        dry_run,
426                    });
427                }
428            },
429        }
430    };
431
432    let PlanState {
433        mut messages_by_pkg,
434        used_paths,
435        releases,
436        released_packages,
437    } = plan_state;
438
439    print_release_plan(&releases);
440
441    let is_prerelease_release = releases_include_prerelease(&releases);
442
443    if dry_run {
444        println!("Dry-run: no files modified, no tags created.");
445        return Ok(ReleaseOutput {
446            released_packages,
447            dry_run: true,
448        });
449    }
450
451    apply_releases(
452        &releases,
453        &workspace,
454        &mut messages_by_pkg,
455        &final_changesets,
456        &config,
457    )?;
458
459    finalize_consumed_changesets(used_paths, &workspace.root, is_prerelease_release)?;
460
461    // If the workspace has a lockfile, regenerate it so the release branch includes
462    // a consistent, up-to-date Cargo.lock and avoids a dirty working tree later.
463    // This runs only when a lockfile already exists, to keep tests (which create
464    // ephemeral workspaces without lockfiles) fast and deterministic.
465    if workspace.root.join("Cargo.lock").exists()
466        && let Err(e) = regenerate_lockfile(&workspace.root)
467    {
468        // Do not fail the release if regenerating the lockfile fails.
469        // Emit a concise warning and continue to keep behavior resilient.
470        eprintln!("Warning: failed to regenerate Cargo.lock, {}", e);
471    }
472
473    Ok(ReleaseOutput {
474        released_packages,
475        dry_run: false,
476    })
477}
478
479fn compute_plan_state(
480    changesets: &[ChangesetInfo],
481    workspace: &Workspace,
482    config: &Config,
483) -> Result<PlanOutcome> {
484    let (mut bump_by_pkg, messages_by_pkg, used_paths) =
485        compute_initial_bumps(changesets, workspace, config)?;
486
487    if bump_by_pkg.is_empty() {
488        return Ok(PlanOutcome::NoApplicablePackages);
489    }
490
491    let dependents = build_dependency_graph(workspace, config);
492    apply_dependency_cascade(&mut bump_by_pkg, &dependents, config, workspace);
493    apply_linked_dependencies(&mut bump_by_pkg, config);
494
495    let releases = prepare_release_plan(&bump_by_pkg, workspace)?;
496    if releases.is_empty() {
497        return Ok(PlanOutcome::NoMatchingCrates);
498    }
499
500    let released_packages: Vec<ReleasedPackage> = releases
501        .iter()
502        .map(|(name, old_version, new_version)| {
503            let bump = bump_by_pkg.get(name).copied().unwrap_or(Bump::Patch);
504            ReleasedPackage {
505                name: name.clone(),
506                old_version: old_version.clone(),
507                new_version: new_version.clone(),
508                bump,
509            }
510        })
511        .collect();
512
513    Ok(PlanOutcome::Plan(PlanState {
514        messages_by_pkg,
515        used_paths,
516        releases,
517        released_packages,
518    }))
519}
520
521fn releases_include_prerelease(releases: &ReleasePlan) -> bool {
522    releases.iter().any(|(_, _, new_version)| {
523        Version::parse(new_version)
524            .map(|v| !v.pre.is_empty())
525            .unwrap_or(false)
526    })
527}
528
529pub(crate) fn restore_prerelease_changesets(
530    prerelease_dir: &Path,
531    changesets_dir: &Path,
532) -> Result<()> {
533    if !prerelease_dir.exists() {
534        return Ok(());
535    }
536
537    for entry in fs::read_dir(prerelease_dir)? {
538        let entry = entry?;
539        let path = entry.path();
540        if !path.is_file() {
541            continue;
542        }
543        if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
544            continue;
545        }
546
547        // Ignore the new location; only errors matter here
548        let _ = move_changeset_file(&path, changesets_dir)?;
549    }
550
551    Ok(())
552}
553
554fn finalize_consumed_changesets(
555    used_paths: BTreeSet<PathBuf>,
556    workspace_root: &Path,
557    preserve_for_prerelease: bool,
558) -> Result<()> {
559    if used_paths.is_empty() {
560        return Ok(());
561    }
562
563    if preserve_for_prerelease {
564        let prerelease_dir = workspace_root.join(".sampo").join("prerelease");
565        for path in used_paths {
566            if !path.exists() {
567                continue;
568            }
569            let _ = move_changeset_file(&path, &prerelease_dir)?;
570        }
571        println!("Preserved consumed changesets for pre-release.");
572    } else {
573        for path in used_paths {
574            if !path.exists() {
575                continue;
576            }
577            fs::remove_file(&path).map_err(|err| SampoError::Io(io_error_with_path(err, &path)))?;
578        }
579        println!("Removed consumed changesets.");
580    }
581
582    Ok(())
583}
584
585pub(crate) fn move_changeset_file(source: &Path, dest_dir: &Path) -> Result<PathBuf> {
586    if !source.exists() {
587        return Ok(source.to_path_buf());
588    }
589
590    fs::create_dir_all(dest_dir)?;
591    let file_name = source
592        .file_name()
593        .ok_or_else(|| SampoError::Changeset("Invalid changeset file name".to_string()))?;
594
595    let mut destination = dest_dir.join(file_name);
596    if destination == source {
597        return Ok(destination);
598    }
599
600    if destination.exists() {
601        destination = unique_destination_path(dest_dir, file_name);
602    }
603
604    fs::rename(source, &destination)?;
605    Ok(destination)
606}
607
/// Find a path in `dir` that does not collide with an existing file, by
/// appending `-1`, `-2`, … to the stem of `file_name` (before the extension,
/// when one is present).
fn unique_destination_path(dir: &Path, file_name: &OsStr) -> PathBuf {
    let name = Path::new(file_name);
    // Fall back to the whole name when there is no stem.
    let stem = name
        .file_stem()
        .unwrap_or(file_name)
        .to_string_lossy()
        .into_owned();
    let ext = name.extension().map(|e| e.to_string_lossy().into_owned());

    (1u64..)
        .map(|counter| {
            let candidate_name = match &ext {
                Some(ext) => format!("{}-{}.{}", stem, counter, ext),
                None => format!("{}-{}", stem, counter),
            };
            dir.join(candidate_name)
        })
        .find(|candidate| !candidate.exists())
        .expect("an unused suffix must eventually be found")
}
632
633/// Regenerate the Cargo.lock at the workspace root using Cargo.
634///
635/// Uses `cargo generate-lockfile`, which will rebuild the lockfile with the latest
636/// compatible versions, ensuring the lockfile reflects the new workspace versions.
637pub(crate) fn regenerate_lockfile(root: &Path) -> Result<()> {
638    let mut cmd = Command::new("cargo");
639    cmd.arg("generate-lockfile").current_dir(root);
640
641    println!("Regenerating Cargo.lock…");
642    let status = cmd.status().map_err(SampoError::Io)?;
643    if !status.success() {
644        return Err(SampoError::Release(format!(
645            "cargo generate-lockfile failed with status {}",
646            status
647        )));
648    }
649    println!("Cargo.lock updated.");
650    Ok(())
651}
652
/// Compute initial bumps from changesets and collect messages
///
/// Returns `(bump_by_pkg, messages_by_pkg, used_paths)` where:
/// - `bump_by_pkg` holds the highest bump requested for each package,
/// - `messages_by_pkg` holds the (possibly enriched) changelog messages per package,
/// - `used_paths` lists changeset files that contributed at least one
///   applicable entry, so they can be consumed later.
fn compute_initial_bumps(
    changesets: &[ChangesetInfo],
    ws: &Workspace,
    cfg: &Config,
) -> Result<InitialBumpsResult> {
    let mut bump_by_pkg: BTreeMap<String, Bump> = BTreeMap::new();
    let mut messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>> = BTreeMap::new();
    let mut used_paths: BTreeSet<std::path::PathBuf> = BTreeSet::new();

    // Resolve GitHub repo slug once if available (config, env or origin remote)
    let repo_slug = detect_github_repo_slug_with_config(&ws.root, cfg.github_repository.as_deref());
    // Token is optional; checked GITHUB_TOKEN first, then GH_TOKEN.
    let github_token = std::env::var("GITHUB_TOKEN")
        .ok()
        .or_else(|| std::env::var("GH_TOKEN").ok());

    // Build quick lookup for crate info
    let mut by_name: BTreeMap<String, &CrateInfo> = BTreeMap::new();
    for c in &ws.members {
        by_name.insert(c.name.clone(), c);
    }

    for cs in changesets {
        let mut consumed_changeset = false;
        for (pkg, bump) in &cs.entries {
            // Skip entries for ignored crates. NOTE(review): entries naming
            // packages that are not workspace members are NOT skipped here
            // and still produce bumps/messages — confirm that is intended.
            if let Some(info) = by_name.get(pkg)
                && should_ignore_crate(cfg, ws, info)?
            {
                continue;
            }

            // Mark this changeset as consumed since at least one package is applicable
            consumed_changeset = true;

            // Keep the highest bump requested for this package across all changesets.
            bump_by_pkg
                .entry(pkg.clone())
                .and_modify(|b| {
                    if *bump > *b {
                        *b = *bump;
                    }
                })
                .or_insert(*bump);

            // Enrich message with commit info and acknowledgments
            let commit_hash = get_commit_hash_for_path(&ws.root, &cs.path);
            let enriched = if let Some(hash) = commit_hash {
                enrich_changeset_message(
                    &cs.message,
                    &hash,
                    &ws.root,
                    repo_slug.as_deref(),
                    github_token.as_deref(),
                    cfg.changelog_show_commit_hash,
                    cfg.changelog_show_acknowledgments,
                )
            } else {
                // No commit found for this changeset file; fall back to the raw message.
                cs.message.clone()
            };

            messages_by_pkg
                .entry(pkg.clone())
                .or_default()
                .push((enriched, *bump));
        }
        if consumed_changeset {
            used_paths.insert(cs.path.clone());
        }
    }

    Ok((bump_by_pkg, messages_by_pkg, used_paths))
}
724
725/// Build reverse dependency graph: dep -> set of dependents
726/// Only includes non-ignored packages in the graph
727fn build_dependency_graph(ws: &Workspace, cfg: &Config) -> BTreeMap<String, BTreeSet<String>> {
728    let mut dependents: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
729
730    // Build a set of ignored package names for quick lookup
731    let ignored_packages: BTreeSet<String> = ws
732        .members
733        .iter()
734        .filter(|c| should_ignore_crate(cfg, ws, c).unwrap_or(false))
735        .map(|c| c.name.clone())
736        .collect();
737
738    for c in &ws.members {
739        // Skip ignored packages when building the dependency graph
740        if ignored_packages.contains(&c.name) {
741            continue;
742        }
743
744        for dep in &c.internal_deps {
745            // Also skip dependencies that point to ignored packages
746            if ignored_packages.contains(dep) {
747                continue;
748            }
749
750            dependents
751                .entry(dep.clone())
752                .or_default()
753                .insert(c.name.clone());
754        }
755    }
756    dependents
757}
758
/// Apply dependency cascade logic and fixed dependency groups
///
/// Starting from the packages that already have bumps, walks the reverse
/// dependency graph so that dependents of a bumped package are bumped too
/// (patch by default; same level as the dependency for fixed-group members),
/// and propagates bumps bidirectionally inside fixed dependency groups.
/// Existing bumps are only ever raised, never lowered.
fn apply_dependency_cascade(
    bump_by_pkg: &mut BTreeMap<String, Bump>,
    dependents: &BTreeMap<String, BTreeSet<String>>,
    cfg: &Config,
    ws: &Workspace,
) {
    // Helper function to find which fixed group a package belongs to, if any
    let find_fixed_group = |pkg_name: &str| -> Option<usize> {
        cfg.fixed_dependencies
            .iter()
            .position(|group| group.contains(&pkg_name.to_string()))
    };

    // Build a quick lookup map for crate info
    let mut by_name: BTreeMap<String, &CrateInfo> = BTreeMap::new();
    for c in &ws.members {
        by_name.insert(c.name.clone(), c);
    }

    // Worklist of packages whose bump may still need to propagate; `seen`
    // ensures each package is enqueued (and thus processed) at most once.
    let mut queue: Vec<String> = bump_by_pkg.keys().cloned().collect();
    let mut seen: BTreeSet<String> = queue.iter().cloned().collect();

    while let Some(changed) = queue.pop() {
        let changed_bump = bump_by_pkg.get(&changed).copied().unwrap_or(Bump::Patch);

        // 1. Handle normal dependency relationships (changed dep → its dependents)
        if let Some(deps) = dependents.get(&changed) {
            for dep_name in deps {
                // Check if this dependent package should be ignored
                if let Some(info) = by_name.get(dep_name) {
                    match should_ignore_crate(cfg, ws, info) {
                        Ok(true) => continue,
                        Ok(false) => {} // Continue processing
                        Err(_) => {
                            // On I/O error reading manifest, err on the side of not ignoring
                            // This maintains backwards compatibility and avoids silent failures
                        }
                    }
                }

                // Determine bump level for this dependent
                let dependent_bump = if find_fixed_group(dep_name).is_some() {
                    // Fixed dependencies: same bump level as the dependency
                    changed_bump
                } else {
                    // Normal dependencies: at least patch
                    Bump::Patch
                };

                let entry = bump_by_pkg
                    .entry(dep_name.clone())
                    .or_insert(dependent_bump);
                // If already present, keep the higher bump
                if *entry < dependent_bump {
                    *entry = dependent_bump;
                }
                if !seen.contains(dep_name) {
                    queue.push(dep_name.clone());
                    seen.insert(dep_name.clone());
                }
            }
        }

        // 2. Handle fixed dependency groups (bidirectional)
        if let Some(group_idx) = find_fixed_group(&changed) {
            // All packages in the same fixed group should bump together
            for group_member in &cfg.fixed_dependencies[group_idx] {
                if group_member != &changed {
                    // Check if this group member should be ignored
                    if let Some(info) = by_name.get(group_member) {
                        match should_ignore_crate(cfg, ws, info) {
                            Ok(true) => continue,
                            Ok(false) => {} // Continue processing
                            Err(_) => {
                                // On I/O error reading manifest, err on the side of not ignoring
                                // This maintains backwards compatibility and avoids silent failures
                            }
                        }
                    }

                    let entry = bump_by_pkg
                        .entry(group_member.clone())
                        .or_insert(changed_bump);
                    // If already present, keep the higher bump
                    if *entry < changed_bump {
                        *entry = changed_bump;
                    }
                    if !seen.contains(group_member) {
                        queue.push(group_member.clone());
                        seen.insert(group_member.clone());
                    }
                }
            }
        }
    }
}
856
857/// Apply linked dependencies logic: highest bump level to affected packages only
858fn apply_linked_dependencies(bump_by_pkg: &mut BTreeMap<String, Bump>, cfg: &Config) {
859    for group in &cfg.linked_dependencies {
860        // Check if any package in this group has been bumped
861        let mut group_has_bumps = false;
862        let mut highest_bump = Bump::Patch;
863
864        // First pass: find the highest bump level in the group among affected packages
865        for group_member in group {
866            if let Some(&member_bump) = bump_by_pkg.get(group_member) {
867                group_has_bumps = true;
868                if member_bump > highest_bump {
869                    highest_bump = member_bump;
870                }
871            }
872        }
873
874        // If any package in the group is being bumped, apply highest bump to affected packages only
875        if group_has_bumps {
876            // Apply the highest bump level to packages that are already being bumped
877            // (either directly affected or through dependency cascade)
878            for group_member in group {
879                if bump_by_pkg.contains_key(group_member) {
880                    // Only update if the current bump is lower than the group's highest bump
881                    let current_bump = bump_by_pkg
882                        .get(group_member)
883                        .copied()
884                        .unwrap_or(Bump::Patch);
885                    if highest_bump > current_bump {
886                        bump_by_pkg.insert(group_member.clone(), highest_bump);
887                    }
888                }
889            }
890        }
891    }
892}
893
894/// Prepare the release plan by matching bumps to workspace members
895fn prepare_release_plan(
896    bump_by_pkg: &BTreeMap<String, Bump>,
897    ws: &Workspace,
898) -> Result<ReleasePlan> {
899    // Map crate name -> CrateInfo for quick lookup
900    let mut by_name: BTreeMap<String, &CrateInfo> = BTreeMap::new();
901    for c in &ws.members {
902        by_name.insert(c.name.clone(), c);
903    }
904
905    let mut releases: Vec<(String, String, String)> = Vec::new(); // (name, old_version, new_version)
906    for (name, bump) in bump_by_pkg {
907        if let Some(info) = by_name.get(name) {
908            let old = if info.version.is_empty() {
909                "0.0.0".to_string()
910            } else {
911                info.version.clone()
912            };
913
914            let newv = bump_version(&old, *bump).unwrap_or_else(|_| old.clone());
915
916            releases.push((name.clone(), old, newv));
917        }
918    }
919
920    Ok(releases)
921}
922
923/// Print the planned releases
924fn print_release_plan(releases: &ReleasePlan) {
925    println!("Planned releases:");
926    for (name, old, newv) in releases {
927        println!("  {name}: {old} -> {newv}");
928    }
929}
930
/// Timezone preference used when formatting the release date shown in
/// changelog section headers.
#[derive(Debug, Clone, Copy)]
enum ReleaseDateTimezone {
    /// The machine's local timezone (default when no preference is set).
    Local,
    /// Coordinated Universal Time ("UTC" or "Z" in the config).
    Utc,
    /// A fixed UTC offset such as "+02:00".
    Offset(FixedOffset),
    /// An IANA timezone name such as "Europe/Paris".
    Named(Tz),
}
938
939fn parse_release_date_timezone(spec: &str) -> Result<ReleaseDateTimezone> {
940    let trimmed = spec.trim();
941    let invalid_value = || {
942        SampoError::Config(format!(
943            "Unsupported changelog.release_date_timezone value '{trimmed}'. Use 'UTC', 'local', a fixed offset like '+02:00', or an IANA timezone name such as 'Europe/Paris'."
944        ))
945    };
946    if trimmed.is_empty() {
947        return Ok(ReleaseDateTimezone::Local);
948    }
949
950    if trimmed.eq_ignore_ascii_case("local") {
951        return Ok(ReleaseDateTimezone::Local);
952    }
953
954    if trimmed.eq_ignore_ascii_case("utc") || trimmed.eq_ignore_ascii_case("z") {
955        return Ok(ReleaseDateTimezone::Utc);
956    }
957
958    if let Ok(zone) = trimmed.parse::<Tz>() {
959        return Ok(ReleaseDateTimezone::Named(zone));
960    }
961
962    let bytes = trimmed.as_bytes();
963    if bytes.len() < 2 {
964        return Err(invalid_value());
965    }
966
967    let sign = match bytes[0] as char {
968        '+' => 1,
969        '-' => -1,
970        _ => return Err(invalid_value()),
971    };
972
973    let remainder = &trimmed[1..];
974    if remainder.is_empty() {
975        return Err(invalid_value());
976    }
977
978    let (hour_part, minute_part) = if let Some(idx) = remainder.find(':') {
979        let (h, m) = remainder.split_at(idx);
980        if m.len() < 2 {
981            return Err(invalid_value());
982        }
983        (h, &m[1..])
984    } else if remainder.len() == 4 {
985        (&remainder[..2], &remainder[2..])
986    } else if remainder.len() == 2 {
987        (remainder, "00")
988    } else {
989        return Err(invalid_value());
990    };
991
992    let hours: u32 = hour_part.parse().map_err(|_| invalid_value())?;
993    let minutes: u32 = minute_part.parse().map_err(|_| invalid_value())?;
994
995    if hours > 23 || minutes > 59 {
996        return Err(SampoError::Config(format!(
997            "Unsupported changelog.release_date_timezone value '{trimmed}'. Hours must be <= 23 and minutes <= 59."
998        )));
999    }
1000
1001    let total_seconds = (hours * 3600 + minutes * 60) as i32;
1002    let offset = if sign >= 0 {
1003        FixedOffset::east_opt(total_seconds)
1004    } else {
1005        FixedOffset::west_opt(total_seconds)
1006    };
1007
1008    match offset {
1009        Some(value) => Ok(ReleaseDateTimezone::Offset(value)),
1010        None => Err(SampoError::Config(format!(
1011            "Unsupported changelog.release_date_timezone value '{trimmed}'. Offset is out of range."
1012        ))),
1013    }
1014}
1015
1016fn compute_release_date_display(cfg: &Config) -> Result<Option<String>> {
1017    compute_release_date_display_with_now(cfg, Utc::now())
1018}
1019
1020fn compute_release_date_display_with_now(
1021    cfg: &Config,
1022    now: DateTime<Utc>,
1023) -> Result<Option<String>> {
1024    if !cfg.changelog_show_release_date {
1025        return Ok(None);
1026    }
1027
1028    let format_str = cfg.changelog_release_date_format.trim();
1029    if format_str.is_empty() {
1030        return Ok(None);
1031    }
1032
1033    let timezone_pref = cfg
1034        .changelog_release_date_timezone
1035        .as_deref()
1036        .map(str::trim)
1037        .filter(|s| !s.is_empty())
1038        .map(parse_release_date_timezone)
1039        .transpose()?;
1040
1041    let tz = timezone_pref.unwrap_or(ReleaseDateTimezone::Local);
1042
1043    let formatted = match tz {
1044        ReleaseDateTimezone::Local => now.with_timezone(&Local).format(format_str).to_string(),
1045        ReleaseDateTimezone::Utc => now.format(format_str).to_string(),
1046        ReleaseDateTimezone::Offset(offset) => {
1047            now.with_timezone(&offset).format(format_str).to_string()
1048        }
1049        ReleaseDateTimezone::Named(zone) => now.with_timezone(&zone).format(format_str).to_string(),
1050    };
1051
1052    Ok(Some(formatted))
1053}
1054
1055/// Apply all releases: update manifests and changelogs
/// Apply all releases: update manifests and changelogs
///
/// For every planned `(name, old, new)` release this rewrites the crate's
/// `Cargo.toml` via `update_manifest_versions` and prepends a new section to
/// its `CHANGELOG.md`. Dependency-update explanations are merged into
/// `messages_by_pkg` first so they appear in the changelog output.
///
/// Errors propagate from manifest metadata loading, release-date formatting,
/// and file I/O.
fn apply_releases(
    releases: &ReleasePlan,
    ws: &Workspace,
    messages_by_pkg: &mut BTreeMap<String, Vec<(String, Bump)>>,
    changesets: &[ChangesetInfo],
    cfg: &Config,
) -> Result<()> {
    // Build lookup maps
    let mut by_name: BTreeMap<String, &CrateInfo> = BTreeMap::new();
    for c in &ws.members {
        by_name.insert(c.name.clone(), c);
    }

    // name -> new version, passed to the manifest updater for every crate.
    let mut new_version_by_name: BTreeMap<String, String> = BTreeMap::new();
    for (name, _old, newv) in releases {
        new_version_by_name.insert(name.clone(), newv.clone());
    }

    let manifest_metadata = ManifestMetadata::load(ws)?;

    // Build releases map for dependency explanations
    let releases_map: BTreeMap<String, (String, String)> = releases
        .iter()
        .map(|(name, old, new)| (name.clone(), (old.clone(), new.clone())))
        .collect();

    // Use unified function to detect all dependency explanations
    let dependency_explanations =
        detect_all_dependency_explanations(changesets, ws, cfg, &releases_map);

    // Merge dependency explanations into existing messages
    for (pkg_name, explanations) in dependency_explanations {
        messages_by_pkg
            .entry(pkg_name)
            .or_default()
            .extend(explanations);
    }

    // Optional formatted date for the new changelog section headers.
    let release_date_display = compute_release_date_display(cfg)?;

    // Apply updates for each release
    for (name, old, newv) in releases {
        // NOTE(review): assumes every planned release names a workspace
        // member (as produced by prepare_release_plan) — confirm callers
        // never pass foreign names, or this unwrap panics.
        let info = by_name.get(name.as_str()).unwrap();
        let manifest_path = info.path.join("Cargo.toml");
        let text = fs::read_to_string(&manifest_path)?;

        // Update manifest versions
        let (updated, _dep_updates) = update_manifest_versions(
            &manifest_path,
            &text,
            Some(newv.as_str()),
            &new_version_by_name,
            Some(&manifest_metadata),
        )?;
        fs::write(&manifest_path, updated)?;

        let messages = messages_by_pkg.get(name).cloned().unwrap_or_default();
        update_changelog(
            &info.path,
            name,
            old,
            newv,
            &messages,
            release_date_display.as_deref(),
        )?;
    }

    Ok(())
}
1125
/// Normalize a partial semver string to a full `MAJOR.MINOR.PATCH` core.
///
/// "1" becomes "1.0.0", "1.2-alpha" becomes "1.2.0-alpha"; any pre-release
/// or build suffix (starting at the first '-' or '+') is preserved verbatim.
/// Returns an error for empty input, more than three components, or an empty
/// component such as in "1..2".
fn normalize_version_input(input: &str) -> std::result::Result<String, String> {
    let trimmed = input.trim();
    if trimmed.is_empty() {
        return Err("Version string cannot be empty".to_string());
    }

    // Split the numeric core from any pre-release/build suffix.
    let split_at = trimmed
        .find(|ch: char| ch == '-' || ch == '+')
        .unwrap_or(trimmed.len());
    let (core, suffix) = trimmed.split_at(split_at);

    let components: Vec<&str> = if core.is_empty() {
        Vec::new()
    } else {
        core.split('.').collect()
    };

    if components.is_empty() || components.len() > 3 {
        return Err(format!(
            "Invalid semantic version '{input}': expected one to three numeric components"
        ));
    }

    if components.iter().any(|part| part.is_empty()) {
        return Err(format!(
            "Invalid semantic version '{input}': found empty numeric component"
        ));
    }

    // Pad missing minor/patch components with zeros.
    let mut padded = components;
    while padded.len() < 3 {
        padded.push("0");
    }

    Ok(format!("{}{}", padded.join("."), suffix))
}
1165
1166pub(crate) fn parse_version_string(input: &str) -> std::result::Result<Version, String> {
1167    let normalized = normalize_version_input(input)?;
1168    Version::parse(&normalized).map_err(|err| format!("Invalid semantic version '{input}': {err}"))
1169}
1170
1171fn implied_prerelease_bump(version: &Version) -> std::result::Result<Bump, String> {
1172    if version.pre.is_empty() {
1173        return Err("Version does not contain a pre-release identifier".to_string());
1174    }
1175
1176    if version.minor == 0 && version.patch == 0 {
1177        Ok(Bump::Major)
1178    } else if version.patch == 0 {
1179        Ok(Bump::Minor)
1180    } else {
1181        Ok(Bump::Patch)
1182    }
1183}
1184
1185fn increment_prerelease(pre: &Prerelease) -> std::result::Result<Prerelease, String> {
1186    if pre.is_empty() {
1187        return Err("Pre-release identifier missing".to_string());
1188    }
1189
1190    let mut parts: Vec<String> = pre.as_str().split('.').map(|s| s.to_string()).collect();
1191    if parts.is_empty() {
1192        return Err("Pre-release identifier missing".to_string());
1193    }
1194
1195    let last_is_numeric = parts
1196        .last()
1197        .map(|part| part.chars().all(|ch| ch.is_ascii_digit()))
1198        .unwrap_or(false);
1199
1200    if last_is_numeric {
1201        let value = parts
1202            .last()
1203            .unwrap()
1204            .parse::<u64>()
1205            .map_err(|_| "Pre-release component is not a valid number".to_string())?;
1206        let incremented = value
1207            .checked_add(1)
1208            .ok_or_else(|| "Pre-release counter overflow".to_string())?;
1209        *parts.last_mut().unwrap() = incremented.to_string();
1210    } else {
1211        parts.push("1".to_string());
1212    }
1213
1214    let candidate = parts.join(".");
1215    Prerelease::new(&candidate).map_err(|err| format!("Invalid pre-release '{candidate}': {err}"))
1216}
1217
1218fn strip_trailing_numeric_identifiers(pre: &Prerelease) -> Option<Prerelease> {
1219    if pre.is_empty() {
1220        return None;
1221    }
1222
1223    let mut parts: Vec<&str> = pre.as_str().split('.').collect();
1224    while let Some(last) = parts.last() {
1225        if last.chars().all(|ch| ch.is_ascii_digit()) {
1226            parts.pop();
1227        } else {
1228            break;
1229        }
1230    }
1231
1232    if parts.is_empty() {
1233        None
1234    } else {
1235        let candidate = parts.join(".");
1236        Prerelease::new(&candidate).ok()
1237    }
1238}
1239
1240fn apply_base_bump(version: &mut Version, bump: Bump) -> std::result::Result<(), String> {
1241    match bump {
1242        Bump::Patch => {
1243            version.patch = version
1244                .patch
1245                .checked_add(1)
1246                .ok_or_else(|| "Patch component overflow".to_string())?;
1247        }
1248        Bump::Minor => {
1249            version.minor = version
1250                .minor
1251                .checked_add(1)
1252                .ok_or_else(|| "Minor component overflow".to_string())?;
1253            version.patch = 0;
1254        }
1255        Bump::Major => {
1256            version.major = version
1257                .major
1258                .checked_add(1)
1259                .ok_or_else(|| "Major component overflow".to_string())?;
1260            version.minor = 0;
1261            version.patch = 0;
1262        }
1263    }
1264    version.pre = Prerelease::EMPTY;
1265    version.build = BuildMetadata::EMPTY;
1266    Ok(())
1267}
1268
1269/// Bump a semver version string, including pre-release handling
/// Bump a semver version string, including pre-release handling
///
/// Stable versions get a plain semver bump. Pre-release versions either
/// advance their counter — when the requested bump is already covered by the
/// staged release, e.g. a patch bump on `1.2.3-alpha.1` -> `1.2.3-alpha.2` —
/// or graduate to a new base version while keeping the non-numeric tag,
/// e.g. a major bump on `1.2.3-alpha.1` -> `2.0.0-alpha`.
///
/// Returns an error string when `old` is not parseable or its pre-release
/// has no non-numeric identifier to carry over.
pub fn bump_version(old: &str, bump: Bump) -> std::result::Result<String, String> {
    let mut version = parse_version_string(old)?;
    let original_pre = version.pre.clone();

    // Stable release: plain major/minor/patch bump.
    if original_pre.is_empty() {
        apply_base_bump(&mut version, bump)?;
        return Ok(version.to_string());
    }

    // Which release is this pre-release staged for (from its zeroed fields)?
    let implied = implied_prerelease_bump(&version)?;

    if bump <= implied {
        // The staged release already covers the request: advance the counter.
        version.pre = increment_prerelease(&original_pre)?;
        version.build = BuildMetadata::EMPTY;
        Ok(version.to_string())
    } else {
        // The request exceeds the staged release: move to a new base version
        // and restart the pre-release series (counter stripped, tag kept).
        let base_pre = strip_trailing_numeric_identifiers(&original_pre).ok_or_else(|| {
            format!(
                "Pre-release version '{old}' must include a non-numeric identifier before the counter"
            )
        })?;

        apply_base_bump(&mut version, bump)?;
        version.pre = base_pre;
        Ok(version.to_string())
    }
}
1297
/// Split changelog text into (intro, versions) at the first line that starts
/// a version section (a line beginning with "## "). When no such line exists,
/// the whole body is the intro and the versions part is empty.
fn split_intro_and_versions(body: &str) -> (&str, &str) {
    // A header at the very start means there is no intro at all.
    if body.starts_with("## ") {
        return body.split_at(0);
    }

    // Walk line starts; split at the first one that begins a version header.
    let mut search_from = 0;
    while let Some(newline) = body[search_from..].find('\n') {
        let line_start = search_from + newline + 1;
        if body[line_start..].starts_with("## ") {
            return body.split_at(line_start);
        }
        search_from = line_start;
    }

    (body, "")
}
1316
/// Does a changelog section header refer to `version`?
///
/// Matches the bare version as well as versions followed by a separator
/// (em dash or hyphen) and trailing text such as a release date.
fn header_matches_release_version(header_text: &str, version: &str) -> bool {
    match header_text.strip_prefix(version) {
        Some(rest) => {
            // After the version, only whitespace plus an optional dash-style
            // suffix (e.g. " — 2024-06-20") is allowed.
            let rest = rest.trim_start();
            rest.is_empty() || rest.starts_with('—') || rest.starts_with('-')
        }
        None => false,
    }
}
1330
/// Rewrite a crate's CHANGELOG.md with a new top section for `new_version`.
///
/// Keeps any intro text before the first "## " header (creating a default
/// "# <package>" header when there is none), merges the bullets of an
/// unpublished in-progress top section into the new entries, and preserves
/// already-published sections untouched.
///
/// `entries` are (message, bump) pairs; `release_date_display`, when
/// non-blank, is appended to the section header after an em dash.
fn update_changelog(
    crate_dir: &Path,
    package: &str,
    old_version: &str,
    new_version: &str,
    entries: &[(String, Bump)],
    release_date_display: Option<&str>,
) -> Result<()> {
    let path = crate_dir.join("CHANGELOG.md");
    let existing = if path.exists() {
        fs::read_to_string(&path)?
    } else {
        String::new()
    };
    // Strip a UTF-8 BOM so header detection works on the first line.
    let cleaned = existing.trim_start_matches('\u{feff}');
    let (intro_part, versions_part) = split_intro_and_versions(cleaned);
    let mut intro = intro_part.to_string();
    let mut versions_body = versions_part.to_string();

    if intro.trim().is_empty() {
        intro = format!("# {}\n\n", package);
    }

    // Parse and merge the current top section only if it's an unpublished section.
    // Heuristic: if the top section header equals the current (old) version, it is published
    // and must be preserved. Otherwise, treat it as in-progress and merge its bullets.
    let mut merged_major: Vec<String> = Vec::new();
    let mut merged_minor: Vec<String> = Vec::new();
    let mut merged_patch: Vec<String> = Vec::new();

    // helper to push without duplicates (preserve append order)
    let push_unique = |list: &mut Vec<String>, msg: &str| {
        if !list.iter().any(|m| m == msg) {
            list.push(msg.to_string());
        }
    };

    // Collect new entries
    for (msg, bump) in entries {
        match bump {
            Bump::Major => push_unique(&mut merged_major, msg),
            Bump::Minor => push_unique(&mut merged_minor, msg),
            Bump::Patch => push_unique(&mut merged_patch, msg),
        }
    }

    // If body starts with a previous top section (## ...), inspect its header.
    // If header == old_version => preserve it (do not merge or strip).
    // Else => parse and merge its bullets, then strip that section.
    let trimmed = versions_body.trim_start();
    if trimmed.starts_with("## ") {
        // Extract first header line text
        let mut lines_iter = trimmed.lines();
        let header_line = lines_iter.next().unwrap_or("").trim();
        let header_text = header_line.trim_start_matches("## ").trim();

        let is_published_top = header_matches_release_version(header_text, old_version);

        if !is_published_top {
            // Determine the extent of the first section in 'trimmed'
            let after_header_offset = header_line.len();
            let rest_after_header = &trimmed[after_header_offset..];
            // Find next section marker starting at a new line
            let next_rel = rest_after_header.find("\n## ");
            let (section_text, remaining) = match next_rel {
                Some(pos) => {
                    let end = after_header_offset + pos + 1; // include leading newline
                    (&trimmed[..end], &trimmed[end..])
                }
                None => (trimmed, ""),
            };

            // Walk the in-progress section, bucketing its bullets by the
            // "### ... changes" subsection they appear under.
            let mut current = None::<&str>;
            for line in section_text.lines() {
                let t = line.trim();
                if t.eq_ignore_ascii_case("### Major changes") {
                    current = Some("major");
                    continue;
                } else if t.eq_ignore_ascii_case("### Minor changes") {
                    current = Some("minor");
                    continue;
                } else if t.eq_ignore_ascii_case("### Patch changes") {
                    current = Some("patch");
                    continue;
                }
                if t.starts_with("- ") {
                    let msg = t.trim_start_matches("- ").trim();
                    match current {
                        Some("major") => push_unique(&mut merged_major, msg),
                        Some("minor") => push_unique(&mut merged_minor, msg),
                        Some("patch") => push_unique(&mut merged_patch, msg),
                        _ => {}
                    }
                }
            }

            versions_body = remaining.to_string();
        }
    }

    // Build new aggregated top section
    let mut section = String::new();
    // A blank (whitespace-only) date is treated as no date at all.
    match release_date_display.and_then(|d| (!d.trim().is_empty()).then_some(d)) {
        Some(date) => section.push_str(&format!("## {new_version} — {date}\n\n")),
        None => section.push_str(&format!("## {new_version}\n\n")),
    }

    if !merged_major.is_empty() {
        section.push_str("### Major changes\n\n");
        for msg in &merged_major {
            section.push_str(&crate::markdown::format_markdown_list_item(msg));
        }
        section.push('\n');
    }
    if !merged_minor.is_empty() {
        section.push_str("### Minor changes\n\n");
        for msg in &merged_minor {
            section.push_str(&crate::markdown::format_markdown_list_item(msg));
        }
        section.push('\n');
    }
    if !merged_patch.is_empty() {
        section.push_str("### Patch changes\n\n");
        for msg in &merged_patch {
            section.push_str(&crate::markdown::format_markdown_list_item(msg));
        }
        section.push('\n');
    }

    // Stitch intro + new section + remaining published sections together,
    // keeping exactly one blank line between the pieces.
    let mut combined = String::new();
    combined.push_str(&intro);

    if !combined.is_empty() && !combined.ends_with("\n\n") {
        if combined.ends_with('\n') {
            combined.push('\n');
        } else {
            combined.push_str("\n\n");
        }
    }

    combined.push_str(&section);

    if !versions_body.trim().is_empty() {
        if !combined.ends_with("\n\n") {
            if combined.ends_with('\n') {
                combined.push('\n');
            } else {
                combined.push_str("\n\n");
            }
        }
        combined.push_str(&versions_body);
    }

    fs::write(&path, combined)?;
    Ok(())
}
1487
1488/// Validate fixed dependencies configuration against the workspace
1489fn validate_fixed_dependencies(config: &Config, workspace: &Workspace) -> Result<()> {
1490    let workspace_packages: FxHashSet<String> =
1491        workspace.members.iter().map(|c| c.name.clone()).collect();
1492
1493    for (group_idx, group) in config.fixed_dependencies.iter().enumerate() {
1494        for package in group {
1495            if !workspace_packages.contains(package) {
1496                let available_packages: Vec<String> = workspace_packages.iter().cloned().collect();
1497                return Err(SampoError::Release(format!(
1498                    "Package '{}' in fixed dependency group {} does not exist in the workspace. Available packages: [{}]",
1499                    package,
1500                    group_idx + 1,
1501                    available_packages.join(", ")
1502                )));
1503            }
1504        }
1505    }
1506    Ok(())
1507}
1508
1509#[cfg(test)]
1510mod tests {
1511    use super::*;
1512    use chrono::TimeZone;
1513    use std::collections::BTreeMap;
1514
    // A custom intro before the first version section must survive an update,
    // and the new section must land above the already-published one.
    #[test]
    fn preserves_changelog_intro_when_updating() {
        use std::fs;
        use tempfile::tempdir;

        let temp = tempdir().unwrap();
        let crate_dir = temp.path();
        let intro = "# Custom Changelog Header\n\nIntro text before versions.\n\n";
        let existing = format!(
            "{}## 1.0.0 — 2024-06-19\n\n### Patch changes\n\n- Existing entry\n",
            intro
        );
        fs::write(crate_dir.join("CHANGELOG.md"), existing).unwrap();

        let entries = vec![("Add new feature".to_string(), Bump::Minor)];
        update_changelog(
            crate_dir,
            "my-package",
            "1.0.0",
            "1.0.1",
            &entries,
            Some("2024-06-20"),
        )
        .unwrap();

        let updated = fs::read_to_string(crate_dir.join("CHANGELOG.md")).unwrap();
        assert!(updated.starts_with(intro));

        // New section appears after the intro but before the published 1.0.0.
        let new_idx = updated.find("## 1.0.1").unwrap();
        let old_idx = updated.find("## 1.0.0").unwrap();
        assert!(new_idx >= intro.len());
        assert!(new_idx < old_idx);
        assert!(updated.contains("## 1.0.1 — 2024-06-20"));
        assert!(updated.contains("- Add new feature"));
        assert!(updated.contains("- Existing entry"));
    }
1551
    // With no CHANGELOG.md on disk, a default "# <package>" header is created.
    #[test]
    fn creates_default_header_when_missing_intro() {
        use std::fs;
        use tempfile::tempdir;

        let temp = tempdir().unwrap();
        let crate_dir = temp.path();

        let entries = vec![("Initial release".to_string(), Bump::Major)];
        update_changelog(crate_dir, "new-package", "0.1.0", "1.0.0", &entries, None).unwrap();

        let updated = fs::read_to_string(crate_dir.join("CHANGELOG.md")).unwrap();
        assert!(updated.starts_with("# new-package\n\n## 1.0.0"));
    }
1566
    // Exact match, em-dash date suffix, and hyphen suffix all count as the
    // same release; a different version number does not.
    #[test]
    fn header_matches_release_version_handles_suffixes() {
        assert!(header_matches_release_version("1.0.0", "1.0.0"));
        assert!(header_matches_release_version(
            "1.0.0 — 2024-06-20",
            "1.0.0"
        ));
        assert!(header_matches_release_version("1.0.0-2024-06-20", "1.0.0"));
        assert!(!header_matches_release_version(
            "1.0.1 — 2024-06-20",
            "1.0.0"
        ));
    }
1580
    // A whitespace-only release date must be treated as "no date": the header
    // stays bare and no em dash is emitted.
    #[test]
    fn update_changelog_skips_blank_release_date() {
        use std::fs;
        use tempfile::tempdir;

        let temp = tempdir().unwrap();
        let crate_dir = temp.path();
        let entries = vec![("Bug fix".to_string(), Bump::Patch)];

        update_changelog(
            crate_dir,
            "blank-date",
            "0.1.0",
            "0.1.1",
            &entries,
            Some("   "),
        )
        .unwrap();

        let updated = fs::read_to_string(crate_dir.join("CHANGELOG.md")).unwrap();
        assert!(updated.contains("## 0.1.1\n"));
        assert!(!updated.contains("—"));
    }
1604
    // "UTC" maps to the dedicated Utc variant rather than a zero offset.
    #[test]
    fn parse_release_date_timezone_accepts_utc() {
        match parse_release_date_timezone("UTC").unwrap() {
            ReleaseDateTimezone::Utc => {}
            _ => panic!("Expected UTC timezone"),
        }
    }
1612
    // "+05:45" becomes a FixedOffset of 5h45m east of UTC.
    #[test]
    fn parse_release_date_timezone_accepts_offset() {
        match parse_release_date_timezone("+05:45").unwrap() {
            ReleaseDateTimezone::Offset(offset) => {
                assert_eq!(offset.local_minus_utc(), 5 * 3600 + 45 * 60);
            }
            _ => panic!("Expected fixed offset"),
        }
    }
1622
    // An unknown IANA-looking name yields a config error that names the
    // offending option so the user can find it.
    #[test]
    fn parse_release_date_timezone_rejects_invalid() {
        let err = parse_release_date_timezone("Not/AZone").unwrap_err();
        let msg = err.to_string();
        assert!(msg.contains("release_date_timezone"));
    }
1629
    // %Z renders the timezone abbreviation, so a UTC config yields "UTC".
    #[test]
    fn compute_release_date_display_uses_utc() {
        let cfg = Config {
            changelog_release_date_format: "%Z".to_string(),
            changelog_release_date_timezone: Some("UTC".to_string()),
            ..Default::default()
        };

        let now = Utc.with_ymd_and_hms(2024, 1, 15, 12, 0, 0).unwrap();
        let display = compute_release_date_display_with_now(&cfg, now)
            .unwrap()
            .unwrap();
        assert_eq!(display, "UTC");
    }
1644
    // IANA timezone names resolve through chrono-tz.
    #[test]
    fn parse_release_date_timezone_accepts_named_zone() {
        match parse_release_date_timezone("Europe/Paris").unwrap() {
            ReleaseDateTimezone::Named(zone) => {
                assert_eq!(zone, chrono_tz::Europe::Paris);
            }
            _ => panic!("Expected named timezone"),
        }
    }
1654
    // %z renders the numeric offset; a "-03:30" config formats as "-0330".
    #[test]
    fn compute_release_date_display_uses_offset() {
        let cfg = Config {
            changelog_release_date_format: "%z".to_string(),
            changelog_release_date_timezone: Some("-03:30".to_string()),
            ..Default::default()
        };

        let now = Utc.with_ymd_and_hms(2024, 6, 1, 12, 0, 0).unwrap();
        let display = compute_release_date_display_with_now(&cfg, now)
            .unwrap()
            .unwrap();
        assert_eq!(display, "-0330");
    }
1669
    // January 15 is outside daylight saving, so New York renders as "EST".
    #[test]
    fn compute_release_date_display_uses_named_zone() {
        let cfg = Config {
            changelog_release_date_format: "%Z".to_string(),
            changelog_release_date_timezone: Some("America/New_York".to_string()),
            ..Default::default()
        };

        let now = Utc.with_ymd_and_hms(2024, 1, 15, 12, 0, 0).unwrap();
        let display = compute_release_date_display_with_now(&cfg, now)
            .unwrap()
            .unwrap();
        assert_eq!(display, "EST");
    }
1684
    // Packages matched by the `ignore` globs must not be pulled into a
    // release by the dependency cascade.
    #[test]
    fn test_ignore_packages_in_dependency_cascade() {
        use crate::types::{CrateInfo, Workspace};
        use std::path::PathBuf;

        // Create a mock workspace with packages
        let root = PathBuf::from("/tmp/test");
        let workspace = Workspace {
            root: root.clone(),
            members: vec![
                CrateInfo {
                    name: "main-package".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("main-package"),
                    internal_deps: BTreeSet::new(),
                },
                CrateInfo {
                    name: "examples-package".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("examples/package"),
                    internal_deps: BTreeSet::new(),
                },
                CrateInfo {
                    name: "benchmarks-utils".to_string(),
                    version: "1.0.0".to_string(),
                    path: root.join("benchmarks/utils"),
                    internal_deps: BTreeSet::new(),
                },
            ],
        };

        // Create a config that ignores examples/* and benchmarks/*
        let config = Config {
            ignore: vec!["examples/*".to_string(), "benchmarks/*".to_string()],
            ..Default::default()
        };

        // Create a dependency graph where main-package depends on the ignored packages
        // NOTE(review): the map appears keyed changed-package -> its dependents,
        // which would encode examples/benchmarks depending on main-package —
        // the comment above may be inverted; confirm against the cascade code.
        let mut dependents = BTreeMap::new();
        dependents.insert(
            "main-package".to_string(),
            ["examples-package", "benchmarks-utils"]
                .iter()
                .map(|s| s.to_string())
                .collect(),
        );

        // Start with main-package being bumped
        let mut bump_by_pkg = BTreeMap::new();
        bump_by_pkg.insert("main-package".to_string(), Bump::Minor);

        // Apply dependency cascade
        apply_dependency_cascade(&mut bump_by_pkg, &dependents, &config, &workspace);

        // The ignored packages should NOT be added to bump_by_pkg
        assert_eq!(bump_by_pkg.len(), 1);
        assert!(bump_by_pkg.contains_key("main-package"));
        assert!(!bump_by_pkg.contains_key("examples-package"));
        assert!(!bump_by_pkg.contains_key("benchmarks-utils"));
    }
1745
1746    #[test]
1747    fn test_ignored_packages_excluded_from_dependency_graph() {
1748        use crate::types::{CrateInfo, Workspace};
1749        use std::collections::BTreeSet;
1750        use std::path::PathBuf;
1751
1752        let root = PathBuf::from("/tmp/test");
1753        let workspace = Workspace {
1754            root: root.clone(),
1755            members: vec![
1756                CrateInfo {
1757                    name: "main-package".to_string(),
1758                    version: "1.0.0".to_string(),
1759                    path: root.join("main-package"),
1760                    internal_deps: ["examples-package".to_string()].into_iter().collect(),
1761                },
1762                CrateInfo {
1763                    name: "examples-package".to_string(),
1764                    version: "1.0.0".to_string(),
1765                    path: root.join("examples/package"),
1766                    internal_deps: BTreeSet::new(),
1767                },
1768            ],
1769        };
1770
1771        // Config that ignores examples/*
1772        let config = Config {
1773            ignore: vec!["examples/*".to_string()],
1774            ..Default::default()
1775        };
1776
1777        // Build dependency graph
1778        let dependents = build_dependency_graph(&workspace, &config);
1779
1780        // examples-package should not appear in the dependency graph because it's ignored
1781        // So main-package should not appear as a dependent of examples-package
1782        assert!(!dependents.contains_key("examples-package"));
1783
1784        // The dependency graph should be empty since examples-package is ignored
1785        // and main-package depends on it
1786        assert!(dependents.is_empty());
1787    }
1788}